import onnxruntime as ort
import numpy as np

# Path to the ONNX model
onnx_path = "single_layer_lstm_static_shape.onnx"

# Load the ONNX model
session = ort.InferenceSession(onnx_path)

# Generate random input matching the model's input shape (batch size 1, sequence length 15, input size 10)
input_shape = (1, 15, 10)  # Adjust according to your model's input size
random_input = np.random.rand(*input_shape).astype(np.float32)

# Get the model's input and output names
input_name = session.get_inputs()[0].name
output_name = session.get_outputs()[0].name

# Pass the random input through the ONNX model
output = session.run([output_name], {input_name: random_input})[0]

# Save the input to a raw file
input_file = "input.raw"
random_input.tofile(input_file)

# Save the output to a raw file
output_file = "output.raw"
output.tofile(output_file)

print(f"Random input has been saved to {input_file}")
print(f"Model output has been saved to {output_file}")