from datetime import datetime, timedelta, timezone
from google.cloud import bigquery
# Generate a unique table name using the current timestamp
table_name = "temp_results_" + datetime.now(timezone.utc).strftime('%Y%m%d%H%M%S')
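# Optional hardening (hypothetical, not required by the flow below): append a random
# suffix so two runs started within the same second cannot collide, e.g.
#   import uuid
#   table_name += "_" + uuid.uuid4().hex[:8]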
# The SQL that populates the new uniquely named table. The interpolated names
# (cust_id, table_api, table_id, end_observation, _cols2process) are assumed to be
# defined earlier in the surrounding code.
_sql = f"""
CREATE TABLE your_dataset.{table_name} AS
SELECT
    {cust_id},
    DATE(end_observation) AS end_observation,
    value,
    CONCAT('{table_api}', '__', token) AS token_sfx,
    '{table_api}' AS source
FROM (
    SELECT {cust_id}, end_observation,
        {', '.join([f"CAST({x} AS FLOAT64) AS {x}" for x in _cols2process])}
    FROM {table_id}
    WHERE end_observation = '{end_observation}'
)
UNPIVOT(value FOR token IN ({', '.join(_cols2process)}))
"""
# Run the SQL to create the uniquely named table in BigQuery
# (assumes authentication and the default project are already configured).
client = bigquery.Client()
client.query(_sql).result()
# Set an expiration so BigQuery deletes the temporary table automatically.
table = client.get_table(f"your_dataset.{table_name}")
expiration_time = datetime.now(timezone.utc) + timedelta(minutes=5)
table.expires = expiration_time
client.update_table(table, ["expires"])
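
# Optional follow-up example: read back a few of the unpivoted rows to verify the
# result; list_rows() pages through the table directly, without issuing a query.
for row in client.list_rows(table, max_results=10):
    print(dict(row))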