@@ -165,7 +165,23 @@ def extract_file_data_and_name(logs):
    name = re.sub(r"^b'|'$", '', name)
    name = name.encode('latin1').decode('unicode_escape')
    file_name = name
-    return file_data, file_name
+    src, dst, sport, dport, timestamp = "", "", 0, 0, None  # defaults in case the IP/TCP layers are missing
+    # Extracting src, dst, sport, dport, and timestamp values
+    for item in entry:
+        if isinstance(item, list) and item[0] == "IP":
+            ip_header = item[1]
+            src = ip_header.get("src", "")
+            dst = ip_header.get("dst", "")
+        if isinstance(item, list) and item[0] == "TCP":
+            tcp_header = item[1]
+            sport = tcp_header.get("sport", 0)
+            dport = tcp_header.get("dport", 0)
+            for option in tcp_header.get("options", []):
+                if isinstance(option, list) and option[0] == "Timestamp":
+                    timestamp = option[1]
+
+
+    return file_data, file_name, src, dst, sport, dport, timestamp

# Function to recursively extract packet layer details
def extract_packet_details(packet):
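For review context, a minimal, self-contained sketch of the entry shape the new loop assumes: judging from the isinstance/item[0] checks, each entry is a list of [layer_name, fields] pairs, with only the keys referenced by the .get() calls needed. The addresses, ports, and timestamp values below are illustrative, not taken from a real capture.

# Hypothetical entry, limited to the keys the new .get() calls reference;
# concrete values are made up for illustration.
entry = [
    ["IP", {"src": "10.0.0.5", "dst": "10.0.0.9"}],
    ["TCP", {"sport": 49152, "dport": 80,
             "options": [["Timestamp", [123456, 0]]]}],
]

src = dst = ""
sport = dport = 0
timestamp = None
for item in entry:
    if isinstance(item, list) and item[0] == "IP":
        ip_header = item[1]
        src = ip_header.get("src", "")
        dst = ip_header.get("dst", "")
    if isinstance(item, list) and item[0] == "TCP":
        tcp_header = item[1]
        sport = tcp_header.get("sport", 0)
        dport = tcp_header.get("dport", 0)
        for option in tcp_header.get("options", []):
            if isinstance(option, list) and option[0] == "Timestamp":
                timestamp = option[1]

print(src, dst, sport, dport, timestamp)
# -> 10.0.0.5 10.0.0.9 49152 80 [123456, 0]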
@@ -193,7 +209,7 @@ def extract_packet_details(packet):
    logs = json.load(file)

    # Extract the file data and name
-    file_data, file_name = extract_file_data_and_name(logs)
+    file_data, file_name, src, dst, sport, dport, timestamp = extract_file_data_and_name(logs)

    # Cleaning the file name
    clean_file_name = re.sub(r'\x00', '', file_name)  # remove null bytes
@@ -211,7 +227,12 @@ def extract_packet_details(packet):
    # Prepare metadata
    metadata = {
        "FileName": clean_file_name,
-        "FileSize": convert_size(file_size)
+        "FileSize": convert_size(file_size),
+        "src": src,
+        "dst": dst,
+        "sport": sport,
+        "dport": dport,
+        "timestamp": timestamp
    }

    # Save metadata to JSON file
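The write-out that follows the "# Save metadata to JSON file" comment is outside this hunk; a plausible sketch, assuming a json.dump call and an output path of metadata.json (neither is shown in the diff), would be:

import json

# "metadata" stands in for the dictionary built above; keys match the diff,
# but the values and the output path here are assumptions for illustration.
metadata = {"FileName": "example.txt", "FileSize": "1.2 KB",
            "src": "10.0.0.5", "dst": "10.0.0.9",
            "sport": 49152, "dport": 80, "timestamp": [123456, 0]}

with open("metadata.json", "w") as out_file:
    json.dump(metadata, out_file, indent=4)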