import subprocess
import time

def run_tinyllama(input_text, delay=5):
    # Launch the TinyLlama llamafile as a subprocess
    process = subprocess.Popen(
        ["/home/pi/local_llm_assistant/teddy.llamafile"],  # Path to your TinyLlama executable
        stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
    )
    
    # Optional pause before sending the prompt (communicate() below also waits
    # for the process to finish)
    print(f"Waiting {delay} seconds for TinyLlama to initialize...")
    time.sleep(delay)

    # Send input and capture the output
    stdout, stderr = process.communicate(input=input_text)
    
    # llama.cpp-based binaries typically write log output to stderr, so only
    # treat a non-zero exit code as an actual error
    if process.returncode != 0:
        print("Error:", stderr)
    
    return stdout.strip()  # Remove any extra whitespace

# Example usage
user_input = "Hi"
response = run_tinyllama(user_input, delay=5)  # Delay in seconds
print("Response from TinyLlama:", response)