Sentiment classifier: dataset split, training, and a sanity-check forward pass
# Training script: load a pickled sentiment dataset, split it 80/20 into
# train/validation sets, train a SentimentClassifier for 5 epochs on CPU,
# then run a sanity-check forward pass on random input.
# NOTE(review): SentimentDataset, SentimentClassifier, and train_model are
# defined elsewhere in this project — their contracts are assumed here.

# --- Data: load the pickled dataset and split 80/20 train/validation ---
full_dataset = SentimentDataset(pickle_file="dataset.pkl")
train_size = int(0.8 * len(full_dataset))
val_size = len(full_dataset) - train_size  # remainder avoids off-by-one on odd sizes
train_dataset, val_dataset = random_split(full_dataset, [train_size, val_size])

train_loader = DataLoader(train_dataset, batch_size=8, shuffle=True)
val_loader = DataLoader(val_dataset, batch_size=8, shuffle=False)

# --- Model, loss, optimizer ---
model = SentimentClassifier()
criterion = nn.CrossEntropyLoss()  # expects raw logits and integer class labels
optimizer = optim.Adam(model.parameters(), lr=0.001)

# --- Train ---
trained_model = train_model(
    model, train_loader, val_loader, criterion, optimizer,
    num_epochs=5, device='cpu',
)

# --- Sanity-check inference ---
# FIX: put the model in eval mode (disables dropout / freezes batchnorm stats
# if train_model left it in train mode) and run the forward pass under
# no_grad() so no autograd graph is built for pure inference.
trained_model.eval()

# Dummy input (batch_size=4, input_dim=768) — presumably 768-dim text
# embeddings (e.g. BERT pooled output); confirm against SentimentClassifier.
dummy_input = torch.randn(4, 768)

with torch.no_grad():
    output = trained_model(dummy_input)
print("Output logits:", output)
Leave a Comment