import apache_beam as beam
import pandas as pd
import re
import os
import gzip
# Registry of every input file path seen by the parsing DoFn; consumed later
# by SplitAndSaveFilesDoFn to rename processed files.
# NOTE(review): mutable module-level state shared between DoFns only works on
# a single-process runner (e.g. DirectRunner); on a distributed runner each
# worker gets its own copy — confirm the intended runner.
list_of_files=[]
# Directory scanned for raw event-log files (also used as the output dir).
folder_path = "/var/log/EventLog/"
class ConvertLogFileToDataFrameDoFn(beam.DoFn):
    """Parse one raw event-log text file into a pandas DataFrame.

    Input element: path (str) to a log file.
    Output: a single-element list holding a DataFrame with columns
    ['timestamp', 'hostname', 'process_name', 'process_id', 'log_text'];
    lines that do not match the expected layout are silently skipped.
    """

    # Compiled once instead of being re-scanned through re's cache on every
    # line of every file.
    _LINE_RE = re.compile(
        r'Time: (\S+).*Computer: (\S+).*Event Id: (\d+),Message: (.*)Level: (\d+),Channel: (\S+)'
    )

    _COLUMNS = ['timestamp', 'hostname', 'process_name', 'process_id', 'log_text']

    def process(self, element):
        file_path = element
        print(file_path)
        # NOTE(review): appending to a module-level global from a DoFn only
        # works on single-process runners — confirm the intended runner.
        list_of_files.append(file_path)
        print(list_of_files)
        rows = []
        # Stream the file line by line; no need to materialize readlines().
        with open(file_path, "r") as f:
            for line in f:
                match = self._LINE_RE.search(line)
                if not match:
                    continue
                timestamp, hostname, event_id, log_text, level, channel = match.groups()
                # Keep only the short host name (text before the first dot).
                hostname_prefix = hostname.split(".")[0]
                # NOTE(review): the column labels do not match these values —
                # 'process_name' receives the channel and 'process_id' the
                # event id. Preserved as-is because the labels become CSV
                # headers downstream; confirm before renaming.
                rows.append((timestamp, hostname_prefix, channel, event_id, log_text))
        # The original also built an empty DataFrame up front and then
        # unconditionally replaced it — that dead construction is removed.
        return [pd.DataFrame(rows, columns=self._COLUMNS)]
class SplitAndSaveFilesDoFn(beam.DoFn):
    """Split a combined DataFrame by hostname and write one gzipped CSV per host.

    After writing, every source file recorded in the module-level
    ``list_of_files`` is renamed to mark it as processed.
    """

    def __init__(self, output_dir):
        # Directory that receives the per-host "<host>-windows.log.gz" files.
        self.output_dir = output_dir

    def process(self, element):
        print(list_of_files)
        df = element
        for hostname, group in df.groupby('hostname'):
            # Defensive: keep only the short host name. The upstream DoFn
            # already strips the domain, so this is normally a no-op.
            hostname_prefix = hostname.split(".")[0]
            print(hostname_prefix)
            output_filename = os.path.join(
                self.output_dir, f"{hostname_prefix}-windows.log.gz"
            )
            # Write the CSV straight into the gzip stream. The original wrote
            # a plain CSV into the current working directory, re-read it into
            # gzip, then deleted it — leaking the temp file if anything failed
            # between write and remove.
            with gzip.open(output_filename, 'wt') as f_out:
                group.to_csv(f_out, index=False)
            # Make the archive world-readable/writable for downstream consumers.
            os.chmod(output_filename, 0o777)
        # Rename the successfully processed source files.
        for file1 in list_of_files:
            # NOTE(review): the [16:] slice assumes a fixed 16-character
            # filename prefix, and the bare target path moves the file into
            # the process's current working directory rather than the log
            # folder — confirm both are intended.
            processed_filename = "Processed_logs_" + os.path.basename(file1)[16:]
            os.rename(file1, processed_filename)
        return []
# Assemble and run the pipeline: list the log directory, parse each file into
# a DataFrame, merge them all into one frame, then emit a gzipped CSV per host.
with beam.Pipeline() as pipeline:
    file_names = pipeline | "Read Input Files" >> beam.Create(os.listdir(folder_path))
    full_paths = file_names | "Get Full File Path" >> beam.Map(
        lambda name: os.path.join(folder_path, name)
    )
    frames = full_paths | "Convert Log Data into Dataframe" >> beam.ParDo(
        ConvertLogFileToDataFrameDoFn()
    )
    merged = frames | "Merge Dataframes" >> beam.CombineGlobally(pd.concat)
    data_frames = merged | "Split and Save Files" >> beam.ParDo(
        SplitAndSaveFilesDoFn('/var/log/EventLog/')
    )