Spaces:
Sleeping
Sleeping
Upload 7 files
Browse files- README.md +16 -11
- REPORT.md +0 -0
- app.py +37 -37
- detect.py +17 -17
- parse_logs.py +6 -0
- requirements.txt +4 -0
- sample_logs/example_log.txt +5 -0
README.md
CHANGED
|
@@ -1,11 +1,16 @@
|
|
| 1 |
-
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
LogShield — AI-Powered Log Anomaly Detector
|
| 2 |
+
π Automatically detect anomalies in log files using AI (NLP).
|
| 3 |
+
π Features
|
| 4 |
+
Upload a .txt log file
|
| 5 |
+
|
| 6 |
+
Automatically classify log lines as:
|
| 7 |
+
|
| 8 |
+
✅ Normal
|
| 9 |
+
|
| 10 |
+
π¨ Anomaly
|
| 11 |
+
|
| 12 |
+
Visualize error counts in a chart
|
| 13 |
+
|
| 14 |
+
Easy-to-use Streamlit web app
|
| 15 |
+
|
| 16 |
+
Powered by Hugging Face Transformers (distilbert-base-uncased-finetuned-sst-2-english)
|
REPORT.md
ADDED
|
File without changes
|
app.py
CHANGED
|
@@ -1,37 +1,37 @@
|
|
| 1 |
-
import streamlit as st
|
| 2 |
-
from parse_logs import parse_log_file
|
| 3 |
-
from detect import load_model, detect_anomaly
|
| 4 |
-
import matplotlib.pyplot as plt
|
| 5 |
-
|
| 6 |
-
st.set_page_config(page_title="LogShield - AI-Powered Log Anomaly Detector")
|
| 7 |
-
|
| 8 |
-
st.title("π LogShield - AI-Powered Log Anomaly Detector")
|
| 9 |
-
|
| 10 |
-
uploaded_file = st.file_uploader("π Upload your log file (.txt):", type="txt")
|
| 11 |
-
|
| 12 |
-
if uploaded_file:
|
| 13 |
-
st.success("β
File Uploaded Successfully!")
|
| 14 |
-
|
| 15 |
-
logs = parse_log_file(uploaded_file.read())
|
| 16 |
-
st.write(f"Total Log Lines: {len(logs)}")
|
| 17 |
-
|
| 18 |
-
with st.spinner("Analyzing Logs..."):
|
| 19 |
-
model = load_model()
|
| 20 |
-
results = detect_anomaly(model, logs)
|
| 21 |
-
|
| 22 |
-
# Display results
|
| 23 |
-
anomaly_count = results.count("Anomaly")
|
| 24 |
-
normal_count = results.count("Normal")
|
| 25 |
-
|
| 26 |
-
st.subheader("π¨ Anomaly Detection Results:")
|
| 27 |
-
for log, result in zip(logs, results):
|
| 28 |
-
color = "red" if result == "Anomaly" else "green"
|
| 29 |
-
st.markdown(f"<span style='color:{color}'><strong>{result}</strong>: {log}</span>", unsafe_allow_html=True)
|
| 30 |
-
|
| 31 |
-
# Display Summary
|
| 32 |
-
st.subheader("π Summary Chart")
|
| 33 |
-
fig, ax = plt.subplots()
|
| 34 |
-
ax.bar(["Normal", "Anomaly"], [normal_count, anomaly_count], color=["green", "red"])
|
| 35 |
-
ax.set_ylabel("Count")
|
| 36 |
-
ax.set_title("Log Summary")
|
| 37 |
-
st.pyplot(fig)
|
|
|
|
"""LogShield - Streamlit front end for AI-powered log anomaly detection."""
import streamlit as st
from parse_logs import parse_log_file
from detect import load_model, detect_anomaly
import matplotlib.pyplot as plt

# Page chrome: browser-tab title and on-page headline.
st.set_page_config(page_title="LogShield - AI-Powered Log Anomaly Detector")
st.title("π LogShield - AI-Powered Log Anomaly Detector")

# Only .txt uploads are accepted; value is None until the user picks a file.
log_file = st.file_uploader("π Upload your log file (.txt):", type="txt")

if log_file:
    st.success("β File Uploaded Successfully!")

    # Raw bytes in, cleaned list of log lines out.
    log_lines = parse_log_file(log_file.read())
    st.write(f"Total Log Lines: {len(log_lines)}")

    # First run downloads/loads the model, so show a spinner while we work.
    with st.spinner("Analyzing Logs..."):
        classifier = load_model()
        labels = detect_anomaly(classifier, log_lines)

    # Per-class totals for the summary chart below.
    n_anomaly = labels.count("Anomaly")
    n_normal = labels.count("Normal")

    st.subheader("π¨ Anomaly Detection Results:")
    for line, label in zip(log_lines, labels):
        # Anomalies render red, normal lines green.
        color = "red" if label == "Anomaly" else "green"
        st.markdown(f"<span style='color:{color}'><strong>{label}</strong>: {line}</span>", unsafe_allow_html=True)

    # Bar chart summarizing normal vs. anomalous line counts.
    st.subheader("π Summary Chart")
    fig, ax = plt.subplots()
    ax.bar(["Normal", "Anomaly"], [n_normal, n_anomaly], color=["green", "red"])
    ax.set_ylabel("Count")
    ax.set_title("Log Summary")
    st.pyplot(fig)
|
detect.py
CHANGED
|
@@ -1,17 +1,17 @@
|
|
| 1 |
-
from transformers import pipeline
|
| 2 |
-
|
| 3 |
-
# Load sentiment-analysis model
|
| 4 |
-
def load_model():
|
| 5 |
-
return pipeline("sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")
|
| 6 |
-
|
| 7 |
-
# Predict anomaly for each log line
|
| 8 |
-
def detect_anomaly(model, logs):
|
| 9 |
-
results = model(logs)
|
| 10 |
-
predictions = []
|
| 11 |
-
for result in results:
|
| 12 |
-
# Treat negative sentiment as Anomaly, positive/neutral as Normal
|
| 13 |
-
if result['label'] == 'NEGATIVE':
|
| 14 |
-
predictions.append("Anomaly")
|
| 15 |
-
else:
|
| 16 |
-
predictions.append("Normal")
|
| 17 |
-
return predictions
|
|
|
|
| 1 |
+
from transformers import pipeline
|
| 2 |
+
|
# Module-level memo: the Hugging Face pipeline is expensive to construct
# (model download + weight load), and app.py calls load_model() on every
# Streamlit rerun, so build it at most once per process.
_MODEL = None


def load_model():
    """Return the sentiment-analysis pipeline used as the anomaly scorer.

    The first call constructs the distilbert SST-2 pipeline; subsequent
    calls return the same cached instance.

    Returns:
        A transformers ``pipeline`` callable mapping text(s) to
        ``{'label': ..., 'score': ...}`` predictions.
    """
    global _MODEL
    if _MODEL is None:
        _MODEL = pipeline("sentiment-analysis", model="distilbert-base-uncased-finetuned-sst-2-english")
    return _MODEL
|
| 6 |
+
|
# Classify every log line via the model and map sentiment onto anomaly labels.
def detect_anomaly(model, logs):
    """Label each log line as "Anomaly" or "Normal".

    Args:
        model: a callable classifier (e.g. a transformers sentiment
            pipeline) that maps a list of strings to a list of dicts,
            each carrying a 'label' key.
        logs: list of log-line strings to classify.

    Returns:
        A list, same length and order as *logs*, containing "Anomaly"
        for NEGATIVE predictions and "Normal" for everything else.
    """
    # NEGATIVE sentiment is treated as an anomaly; positive/neutral as normal.
    return [
        "Anomaly" if prediction["label"] == "NEGATIVE" else "Normal"
        for prediction in model(logs)
    ]
|
parse_logs.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def parse_log_file(file_content):
    """Decode raw uploaded bytes into a list of non-empty log lines.

    Args:
        file_content: UTF-8 encoded bytes of the uploaded log file.

    Returns:
        Stripped, non-blank lines in their original order.
    """
    text = file_content.decode("utf-8")
    # Split on newlines, trim whitespace (including any trailing \r),
    # and drop lines that end up empty.
    return [stripped for raw in text.split("\n") if (stripped := raw.strip())]
|
requirements.txt
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit
|
| 2 |
+
transformers
|
| 3 |
+
torch
|
| 4 |
+
matplotlib
|
sample_logs/example_log.txt
ADDED
|
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
INFO 2025-07-10 10:00:00 - Server started successfully
|
| 2 |
+
WARNING 2025-07-10 10:01:00 - High memory usage detected
|
| 3 |
+
ERROR 2025-07-10 10:02:00 - Database connection failed
|
| 4 |
+
INFO 2025-07-10 10:03:00 - Background job completed
|
| 5 |
+
CRITICAL 2025-07-10 10:04:00 - Unauthorized access detected
|