import pandas as pd
from unsloth import FastLanguageModel
from transformers import TextStreamer
import torch
import random
import logging

def setup_logging():
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(levelname)s - %(message)s'
    )
    return logging.getLogger(__name__)

def print_separator(title="", char="=", length=80):
    """Print a separator with optional title"""
    if title:
        side_length = (length - len(title) - 2) // 2
        print(char * side_length + f" {title} " + char * side_length)
    else:
        print(char * length)

def format_flow_prompt(row):
    """Format a single network flow into a prompt"""
    flow_text = f"""Network Flow Description:
Source: {row['IPV4_SRC_ADDR']} (Port: {row['L4_SRC_PORT']})
Destination: {row['IPV4_DST_ADDR']} (Port: {row['L4_DST_PORT']})
Protocol Information:
- Protocol ID: {row['PROTOCOL']}
- Layer 7 Protocol: {row['L7_PROTO']}
- TCP Flags: {row['TCP_FLAGS']}
Traffic Metrics:
- Bytes: {row['IN_BYTES']} inbound, {row['OUT_BYTES']} outbound
- Packets: {row['IN_PKTS']} inbound, {row['OUT_PKTS']} outbound
- Duration: {row['FLOW_DURATION_MILLISECONDS']} milliseconds"""

    # Format in LLaMA-3 style
    return f"""<|begin_of_text|><|start_header_id|>user<|end_header_id|>
Analyze this network flow for potential security threats:
{flow_text}<|eot_id|><|start_header_id|>assistant<|end_header_id|>"""
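

# Quick, self-contained illustration (not part of the original workflow): build a
# toy flow carrying the column names that format_flow_prompt() expects and print
# the rendered prompt. The field values below are made up for demonstration only.
def example_prompt():
    """Print the prompt produced for a small, hand-written example flow."""
    toy_flow = pd.Series({
        'IPV4_SRC_ADDR': '192.168.1.10', 'L4_SRC_PORT': 51432,
        'IPV4_DST_ADDR': '10.0.0.5', 'L4_DST_PORT': 80,
        'PROTOCOL': 6, 'L7_PROTO': 7.0, 'TCP_FLAGS': 27,
        'IN_BYTES': 1200, 'OUT_BYTES': 3400,
        'IN_PKTS': 10, 'OUT_PKTS': 12,
        'FLOW_DURATION_MILLISECONDS': 150,
    })
    print(format_flow_prompt(toy_flow))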

def analyze_single_flow(model_path="cybersec_model_output/checkpoint-4329",
                        test_file="data/test.csv",
                        index=None,
                        attack_type=None):
    """Analyze a single network flow and show the model's complete response"""
    logger = setup_logging()

    print_separator("LOADING DATA AND MODEL", "=")

    # Load test data
    logger.info(f"Loading test data from {test_file}")
    test_df = pd.read_csv(test_file)

    # Select sample based on criteria
    if attack_type:
        attack_samples = test_df[test_df['Attack'].str.lower() == attack_type.lower()]
        if len(attack_samples) == 0:
            raise ValueError(f"No samples found for attack type: {attack_type}")
        sample = attack_samples.iloc[random.randint(0, len(attack_samples) - 1)]
    elif index is not None:
        sample = test_df.iloc[index]
    else:
        sample = test_df.iloc[random.randint(0, len(test_df) - 1)]

    # Load model
    logger.info(f"Loading model from {model_path}")
    model, tokenizer = FastLanguageModel.from_pretrained(
        model_path,
        max_seq_length=2048,
        load_in_4bit=True,
    )

    # Set up tokenizer
    tokenizer.pad_token = tokenizer.eos_token
    tokenizer.padding_side = "right"
    FastLanguageModel.for_inference(model)

    print_separator("SAMPLE INFORMATION", "=")
    logger.info(f"Selected flow index: {sample.name}")
    logger.info(f"True label: {sample['Attack']}")

    # Prepare input
    prompt = format_flow_prompt(sample)
    inputs = tokenizer(
        prompt,
        return_tensors="pt",
        truncation=True,
        max_length=2048
    ).to("cuda")

    # Set up streamer for real-time output
    streamer = TextStreamer(tokenizer)

    with open("model_output.txt", "w") as f:
        # Write separators and metadata
        f.write("=" * 80 + "\n")
        f.write("NETWORK FLOW ANALYSIS\n")
        f.write("=" * 80 + "\n\n")
        f.write("-" * 80 + "\n")
        f.write("METADATA\n")
        f.write("-" * 80 + "\n")
        f.write(f"Flow Index: {sample.name}\n")
        f.write(f"True Label: {sample['Attack']}\n\n")
        f.write("-" * 80 + "\n")
        f.write("INPUT PROMPT\n")
        f.write("-" * 80 + "\n")
        f.write(f"{prompt}\n\n")

        print_separator("MODEL OUTPUT", "=")
        logger.info("Generating analysis...")

        # Generate and capture output
        outputs = model.generate(
            **inputs,
            max_new_tokens=256,
            streamer=streamer,
            use_cache=True
        )

        # Write the complete output
        f.write("-" * 80 + "\n")
        f.write("COMPLETE OUTPUT (including special tokens)\n")
        f.write("-" * 80 + "\n")
        full_output = tokenizer.decode(outputs[0], skip_special_tokens=False)
        f.write(f"{full_output}\n\n")

        # Write cleaned output
        f.write("-" * 80 + "\n")
        f.write("CLEANED OUTPUT (without special tokens)\n")
        f.write("-" * 80 + "\n")
        cleaned_output = tokenizer.decode(outputs[0], skip_special_tokens=True)
        f.write(cleaned_output)
        f.write("\n" + "=" * 80 + "\n")

    print_separator()
    logger.info("Output saved to model_output.txt")
    print_separator()
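

# Optional post-processing sketch (not called anywhere in this script): pull out
# only the assistant's reply from the raw decoded output, assuming the checkpoint
# preserves the LLaMA-3 special tokens used in format_flow_prompt(). If those
# tokens are missing from the output, the full text is returned unchanged.
def extract_assistant_response(full_output):
    """Return the text after the last assistant header, trimmed at <|eot_id|>."""
    marker = "<|start_header_id|>assistant<|end_header_id|>"
    if marker in full_output:
        response = full_output.split(marker)[-1]
        return response.split("<|eot_id|>")[0].strip()
    return full_output.strip()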

def main():
    # Example usage:
    print_separator("STARTING ANALYSIS", "=")

    # Choose one of these options:
    # 1. Random sample
    analyze_single_flow()
    # 2. Specific attack type
    # analyze_single_flow(attack_type="ddos")
    # 3. Specific index
    # analyze_single_flow(index=42)

    print_separator("ANALYSIS COMPLETE", "=")


if __name__ == "__main__":
    main()