// Copy rows from a BigQuery query result into a Scylla table in batches.
// Set the batch size.
batchSize := 1000 // Adjust the batch size according to your requirements

// Execute the query and get the result iterator.
// NOTE: do not name this variable "iterator" — that would shadow the
// google.golang.org/api/iterator package and break the iterator.Done check.
it, err := query.Read(ctx)
if err != nil {
	log.Fatal(err)
}

// Initialize variables for tracking progress.
rowCount := 0
batchCount := 0

// Read the data in batches.
for {
	// Collect up to batchSize rows for this batch. bigquery.Values is
	// already a slice type ([]bigquery.Value), so store values directly
	// instead of pointers; pre-size to avoid repeated growth.
	rows := make([]bigquery.Values, 0, batchSize)
	for i := 0; i < batchSize; i++ {
		var row bigquery.Values
		err := it.Next(&row)
		// iterator.Done is the package-level sentinel signaling end of results.
		if err == iterator.Done {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		rows = append(rows, row)
	}

	// Exit the loop if no more rows are available.
	if len(rows) == 0 {
		break
	}

	// Prepare batch INSERT statements for Scylla.
	// session.NewBatch is preferred over the deprecated gocql.NewBatch.
	// NOTE(review): for bulk loading, gocql.UnloggedBatch is usually faster —
	// LoggedBatch pays coordinator overhead for cross-partition atomicity.
	batch := session.NewBatch(gocql.LoggedBatch)
	for _, row := range rows {
		// Extract the necessary values from the BigQuery row
		// ...
		// Batch.Query takes the CQL string plus bind arguments directly;
		// do NOT wrap it in a *gocql.Query via session.Query.
		batch.Query("INSERT INTO your_table (col1, col2, ...) VALUES (?, ?, ...)", value1, value2, ...)
		rowCount++
	}

	// Execute the batch INSERT statements.
	if err := session.ExecuteBatch(batch); err != nil {
		log.Fatal(err)
	}

	// Update progress.
	batchCount++
	fmt.Printf("Processed batch %d, total rows processed: %d\n", batchCount, rowCount)
}