Compare commits
No commits in common. "main" and "latest" have entirely different histories.
10
.env
10
.env
|
|
@ -2,12 +2,4 @@ MYSQL_HOST="172.17.1.21"
|
|||
MYSQL_USER="root"
|
||||
MYSQL_PASSWORD="N53yBCswuawzBzS445VNAhWVMs3N59Gb9szEsrzXRBzarDqpdETpQeyt5v5CGe"
|
||||
MYSQL_DATABASE="Kunden"
|
||||
MYSQL_AUTH='mysql_native_password'
|
||||
|
||||
## Ticketsystem
|
||||
ZAMMAD_URL = "https://ticket.stines.de/api/v1"
|
||||
ZAMMAD_API_TOKEN ="1v4XGY7cZpBXSfb4s_tIBbywQjcaDV6q65IXQyVXrrBDqVtmAtLxM5tOqIAp0VXZ"
|
||||
|
||||
## API-Server
|
||||
API_SERVER = "http://api.stines.de:8001"
|
||||
API_TOKEN = "^YWUbG7yX*V!tV^KBSd*2c&vdN3wV9a2i7f3hfGFMBYFxi6#mMiJGiaA5KEHE%B*miK%qb7rQ67gmcYP@gqmux8"
|
||||
MYSQL_AUTH='mysql_native_password'
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
apps/ticket_export/exports/RE2025.00012.1.docx
|
||||
apps/ticket_export/exports/RE2025.00013.1.docx
|
||||
apps/ticket_export/exports/RE2025.00014.1.docx
|
||||
|
|
@ -4,8 +4,4 @@ primaryColor="#1abc9c"
|
|||
backgroundColor="#2c3e50"
|
||||
secondaryBackgroundColor="#34495e"
|
||||
textColor="#ffffff"
|
||||
font="sans serif"
|
||||
layout="wide"
|
||||
|
||||
[browser]
|
||||
serverAddress = "reporting.stines.de"
|
||||
font="sans serif"
|
||||
188
app.py
188
app.py
|
|
@ -1,188 +0,0 @@
|
|||
import streamlit as st
|
||||
from streamlit_option_menu import option_menu
|
||||
from sqlalchemy import create_engine
|
||||
import sites.services_reporting as sr
|
||||
import sites.userlist as us
|
||||
import sites.server as s
|
||||
import sites.tickets as ti
|
||||
import sites.lastrun as lr
|
||||
from datetime import datetime
|
||||
from dateutil.relativedelta import relativedelta
|
||||
import mysql.connector
|
||||
import pandas as pd
|
||||
import os
|
||||
|
||||
# Page Settings
st.set_page_config(page_title="Reporting", layout="wide")

# Reporting window: first and last day of the previous calendar month.
start_date = datetime.today().replace(day=1) - relativedelta(months=1)
end_date = datetime.today().replace(day=1) - relativedelta(days=1)
# Comparison window: the month before the reporting window.
start_date_lastmonth = datetime.today().replace(day=1) - relativedelta(months=2)
end_date_lastmonth = datetime.today().replace(day=1) - relativedelta(months=1) - relativedelta(days=1)

# Date formatting: ISO (YYYY-MM-DD) strings used in SQL comparisons below.
start_date_format = start_date.strftime("%Y-%m-%d")
end_date_format = end_date.strftime("%Y-%m-%d")
|
||||
|
||||
# Load custom CSS
|
||||
def load_css(file_name):
    """Inject the stylesheet at *file_name* into the Streamlit page."""
    with open(file_name) as css_file:
        st.markdown(f'<style>{css_file.read()}</style>', unsafe_allow_html=True)
|
||||
|
||||
def get_customer_used_service(end_date):
    """Return (companyname, customer_ID, services_ID, name) rows for services
    recorded on *end_date*.

    Parameters
    ----------
    end_date : str
        Date string (``YYYY-MM-DD``); matched as a substring of ``add_date``.

    Returns
    -------
    list[tuple]
        Raw cursor rows; empty list when nothing matches.
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    try:
        mycursor = mydb.cursor()
        # Parameterised query: the previous f-string interpolation was open
        # to SQL injection; %s binding lets the driver escape the value.
        mycursor.execute(
            """SELECT cs.companyname, cs.customer_ID, cs.services_ID, cs.name
               FROM Kunden.`daily.customer.services` cs
               WHERE add_date LIKE %s""",
            (f"%{end_date}%",),
        )
        return mycursor.fetchall()
    finally:
        # Close even when the query raises (the original leaked the
        # connection on errors).
        mydb.close()
|
||||
|
||||
def load_server_list(start_date, end_date):
    """Load server/core rows whose timestamp falls in [start_date, end_date].

    Parameters
    ----------
    start_date, end_date : datetime | str
        Inclusive range bounds for ``sr.timestamp``.

    Returns
    -------
    pandas.DataFrame
        Columns: server, cores, customer_id.
    """
    from sqlalchemy import text  # local import: module only imports create_engine

    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)

    # Bound parameters instead of f-string interpolation (SQL injection fix).
    query = text("""
        SELECT server, cores, customer_id
        FROM Kunden.`daily.spla.server` sr
        WHERE sr.timestamp BETWEEN :start_date AND :end_date
    """)
    return pd.read_sql_query(
        query, engine,
        params={"start_date": start_date, "end_date": end_date},
    )
|
||||
|
||||
|
||||
def load_user_service_list(service_id, customer_id, start_date, end_date):
    """Count enabled-user rows for one customer/service within a date range.

    Bug fix: the original ignored its *start_date*/*end_date* parameters and
    silently read the module-level ``start_date_format``/``end_date_format``
    globals; the range passed by the caller is now honoured (the module
    globals happen to be formatted copies of the same dates, so existing
    callers see identical results).

    Parameters
    ----------
    service_id, customer_id : int
    start_date, end_date : datetime | str
        Range bounds; datetime objects are formatted as YYYY-MM-DD.

    Returns
    -------
    int
        Row count, 0 when the query returns nothing.
    """
    from sqlalchemy import text  # local import: module only imports create_engine

    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)

    # Normalise bounds to the YYYY-MM-DD form used by the table.
    start = start_date.strftime("%Y-%m-%d") if hasattr(start_date, "strftime") else str(start_date)
    end = end_date.strftime("%Y-%m-%d") if hasattr(end_date, "strftime") else str(end_date)

    # Bound parameters instead of f-string interpolation (SQL injection fix).
    query = text("""
        SELECT COUNT(*) AS max_count
        FROM (
            SELECT username
            FROM Kunden.`daily.user.enabled` sr
            WHERE sr.customer_ID = :customer_id
              AND sr.services_ID = :service_id
              AND sr.timestamp BETWEEN :start AND :end
        ) AS sub;
    """)
    max_user_count = pd.read_sql_query(
        query, engine,
        params={"customer_id": customer_id, "service_id": service_id,
                "start": start, "end": end},
    )
    return max_user_count.iloc[0]['max_count'] if not max_user_count.empty else 0
|
||||
|
||||
def load_user_disabled(start_date, end_date, customer_id):
    """Load disabled-user log rows for *customer_id* within the date range.

    Only the user-facing services (IDs 100, 101, 116, 120) are included.

    Parameters
    ----------
    start_date, end_date : datetime | str
        Inclusive bounds for ``disabledate``.
    customer_id : int

    Returns
    -------
    pandas.DataFrame
        disabledate, username, service_name, customer_name, ticketnumber,
        comment — ordered by service_name.
    """
    from sqlalchemy import text  # local import: module only imports create_engine

    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)

    # Bound parameters instead of f-string interpolation (SQL injection fix).
    query = text("""
        SELECT disabledate, username, service_name, customer_name, ticketnumber, comment
        FROM Kunden.`daily.user.disabled` sud
        WHERE sud.disabledate BETWEEN :start AND :end
          AND customer_id = :customer_id
          AND services_id IN (100,101,116,120)
        ORDER BY service_name
    """)
    # Renamed from ``all`` — do not shadow the builtin.
    disabled_users = pd.read_sql_query(
        query, engine,
        params={"start": start_date, "end": end_date, "customer_id": customer_id},
    )
    return disabled_users
|
||||
|
||||
def home():
    """Render the dashboard page: per-customer metric tiles of active users
    (with disabled-user deltas) for the previous month, followed by the
    SPLA core-package summary."""
    st.title("Dashboard")
    edit_start_date = start_date.strftime("%d.%m.%Y")
    edit_end_date = end_date.strftime("%d.%m.%Y")
    edit_start_date_lastmonth = start_date_lastmonth.strftime("%d.%m.%Y")
    edit_end_date_lastmonth = end_date_lastmonth.strftime("%d.%m.%Y")
    st.subheader(f"Übersicht {edit_start_date} - {edit_end_date}")
    previous_value = None
    columns = None

    c = 0
    print(end_date)
    # Rows are (companyname, customer_ID, services_ID, service_name),
    # grouped by customer because of the previous_value comparison below.
    for i in get_customer_used_service(end_date.strftime("%Y-%m-%d")):
        print(i)
        if previous_value != i[1]:
            # New customer: fresh heading, a fresh 4-column metric row and
            # that customer's disabled-user table.
            st.subheader(f"{i[0]}")
            columns = st.columns(4)
            df = load_user_disabled(start_date_lastmonth, end_date_lastmonth, i[1])

            if not df.empty:
                st.text(f"Deaktivierte User {edit_start_date_lastmonth} - {edit_end_date_lastmonth}")
                st.data_editor(df, use_container_width=True)
            c = 0

        active_users = load_user_service_list(i[2], i[1], start_date, end_date)

        # Filter by the specific service and count distinct usernames.
        disabled_users_count = 0
        if not df.empty and 'service_name' in df.columns and 'username' in df.columns:
            # Assumption: i[3] holds the service name matching the
            # 'service_name' column.
            service_filtered_df = df[df['service_name'] == i[3]]
            disabled_users_count = service_filtered_df['username'].nunique()

        if not active_users:
            st.info(f"Kunde {i[0]} - Service {i[3]} - Not Data found!")
        else:
            # NOTE(review): c is never wrapped — a customer with more than
            # four services with active users would raise IndexError on
            # columns[c]; confirm the 4-service assumption.
            columns[c].metric(
                label=f"Aktive {i[3]} User",
                value=active_users,
                delta=f"-{disabled_users_count}" if disabled_users_count > 0 else None,
                delta_color="inverse"
            )
            c += 1
        previous_value = i[1]

    col1, = st.columns(1)
    df = load_server_list(start_date, end_date)
    # Core packages: server count * 8 SPLA core packages / 2 (see the text
    # explanation rendered below).
    grouped = df.groupby('server')['cores'].count()*8/2
    grouped_series = df.groupby('server')['cores'].max()
    grouped_str = grouped_series.to_string(header=False)
    df['cores'] = pd.to_numeric(df['cores'], errors='coerce')
    server_cores = df.groupby('server')['cores'].max()
    count_cores = server_cores.count()
    with col1:
        st.header("CPU Liste")
        st.text(
            f"Anzahl der Cores:\n{grouped_str}\n "
        )
        st.text(f"Gesamte Anzahl der Cores: {count_cores}")
        st.text(f"Berechung der Core-Pakete = Anzahl der Cores ({count_cores}) * Core-Pakete aus SPLA (8) / 2")
        # Integer part only — the split('.') drops any fractional packages.
        st.header(f"Gesamt : {str(grouped.sum()).split('.')[0]} Pakete")
|
||||
|
||||
# Ausgabe der Ergebnisse
|
||||
|
||||
# Navigation bar using streamlit-option-menu
with st.sidebar:
    selected_page = option_menu(
        menu_title="Navigation",  # required
        options=["Dashboard", "Services Reporting", "User Filter", "Server", "Tickets", "Last-Run"],  # required
        icons=["house", "bar-chart", "filter", "server", "ticket", "clock"],  # optional
        menu_icon="cast",  # optional
        default_index=0,  # optional
        # Bug fix: option_menu expects "vertical" or "horizontal" — the
        # misspelled "vertikal" is not a recognised orientation value.
        orientation="vertical",
    )

# Route to the page module matching the sidebar selection.
if selected_page == "Dashboard":
    home()
elif selected_page == "Services Reporting":
    sr.services_reporting()
elif selected_page == "User Filter":
    us.user_filter()
elif selected_page == "Server":
    s.server_filter()
elif selected_page == "Tickets":
    ti.ticket_filter()
elif selected_page == "Last-Run":
    lr.user_filter()
|
||||
|
|
@ -1,13 +0,0 @@
|
|||
MYSQL_HOST="172.17.1.21"
|
||||
MYSQL_USER="root"
|
||||
MYSQL_PASSWORD="N53yBCswuawzBzS445VNAhWVMs3N59Gb9szEsrzXRBzarDqpdETpQeyt5v5CGe"
|
||||
MYSQL_DATABASE="Kunden"
|
||||
MYSQL_AUTH='mysql_native_password'
|
||||
|
||||
## Ticketsystem
|
||||
ZAMMAD_URL = "https://ticket.stines.de/api/v1"
|
||||
ZAMMAD_API_TOKEN ="1v4XGY7cZpBXSfb4s_tIBbywQjcaDV6q65IXQyVXrrBDqVtmAtLxM5tOqIAp0VXZ"
|
||||
|
||||
## API-Server
|
||||
API_SERVER = "http://api.stines.de:8001"
|
||||
API_TOKEN = "^YWUbG7yX*V!tV^KBSd*2c&vdN3wV9a2i7f3hfGFMBYFxi6#mMiJGiaA5KEHE%B*miK%qb7rQ67gmcYP@gqmux8"
|
||||
Binary file not shown.
Binary file not shown.
|
|
@ -1,289 +0,0 @@
|
|||
import mysql.connector
|
||||
from docx import Document
|
||||
from datetime import datetime
|
||||
from docx.shared import Pt
|
||||
from docx.oxml.ns import qn
|
||||
from docx.enum.text import WD_ALIGN_PARAGRAPH, WD_BREAK
|
||||
from docx.enum.table import WD_ALIGN_VERTICAL
|
||||
from docx.oxml import OxmlElement
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
from dotenv import load_dotenv
|
||||
from dateutil.relativedelta import relativedelta
|
||||
|
||||
load_dotenv()
|
||||
|
||||
def fetch_tickets_from_database():
    """Fetch every ticket closed within the last 32 days, joined with the
    customer billing time-range and the service price.

    Returns
    -------
    list[tuple]
        (number, title, createdate, type, customer_ID, firstdate, time,
        price, service_ID, tags) rows, oldest first.
    """
    connection = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    cur = connection.cursor()

    # Static query — nothing user-supplied is interpolated.
    cur.execute("""
        SELECT t.`number`, t.title, t.createdate, t.`type`, t.customer_ID, tct.firstdate, t.time, s.price, t.service_ID, t.tags
        FROM Kunden.tickets t
        JOIN Kunden.`tickets.customer.timerange` tct ON t.customer_ID = tct.customer_ID
        JOIN Kunden.services s ON s.service_ID = t.service_ID
        WHERE closedate >= DATE_SUB(CURDATE(), INTERVAL 32 DAY)
        ORDER by t.createdate ASC
    """)
    rows = cur.fetchall()
    connection.close()
    return rows
|
||||
|
||||
def fetch_customer_data(customer_id):
    """Look up the company address record plus customer number for *customer_id*.

    Returns
    -------
    tuple | None
        (companyname, street, housenumber, postcode, city, customer) or
        None when the customer is unknown.
    """
    connection = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    cur = connection.cursor()

    # Already parameterised — the driver escapes customer_id.
    cur.execute("""SELECT co.companyname, co.street,co.housenumber, co.postcode, co.city, c.customer FROM company co
                JOIN customers c ON c.customer_ID = co.customer_ID
                WHERE co.customer_id = %s""", (customer_id,))
    record = cur.fetchone()

    connection.close()
    return record
|
||||
|
||||
def fetch_customer_servicetime(customer_id):
    """Fetch the included service time for *customer_id*.

    Returns
    -------
    tuple | None
        One-element (servicetime,) row, or None when not configured.
    """
    connection = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    cur = connection.cursor()

    # Parameterised lookup of the customer's contracted service time.
    cur.execute("""SELECT servicetime FROM `tickets.customers.servicetime`
                WHERE customer_id = %s""", (customer_id,))
    record = cur.fetchone()

    connection.close()
    return record
|
||||
|
||||
|
||||
def fetch_customer_price(price, customer_ID, service_ID):
    """Convert an hourly service *price* to a per-minute rate, applying the
    customer-specific percentage surcharge from `customers.pricechange` when
    one exists.

    Parameters
    ----------
    price : float
        Hourly price of the service.
    customer_ID, service_ID : int
        Keys for the surcharge lookup.

    Returns
    -------
    float
        Price per minute, surcharge included when configured.
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    try:
        cursor = mydb.cursor()
        # Parameterised query: the original interpolated the IDs via an
        # f-string (SQL injection risk).
        cursor.execute(
            """SELECT percent FROM `customers.pricechange`
               WHERE customer_ID = %s and service_ID = %s""",
            (customer_ID, service_ID),
        )
        customer_price_percent = cursor.fetchone()
    finally:
        mydb.close()

    # The stored price is per hour; billing works in minutes.
    price_per_minute = price / 60

    # Apply the percentage surcharge when one is configured.
    if customer_price_percent:
        price_per_minute += price_per_minute * (customer_price_percent[0] / 100)
    else:
        print(f"Standard Preis pro Minute: {price_per_minute:.2f} €")

    return price_per_minute
|
||||
|
||||
def replace_text_in_run_with_format(run, key, value):
    """Substitute the ``{key}`` placeholder in *run* and normalise its font."""
    placeholder = f"{{{key}}}"
    if placeholder not in run.text:
        return
    run.text = run.text.replace(placeholder, str(value))
    # Uniform body formatting: Verdana 10pt, regular weight and slant.
    run.font.name = 'Verdana'
    run.font.size = Pt(10)
    run.font.bold = False
    run.font.italic = False
|
||||
|
||||
def insert_page_break(paragraph):
    """Append a manual page break to *paragraph*."""
    paragraph.add_run().add_break(WD_BREAK.PAGE)
|
||||
|
||||
def replace_text_in_paragraph(paragraph, key, value):
    """Apply the placeholder substitution to every run of *paragraph*."""
    for current_run in paragraph.runs:
        replace_text_in_run_with_format(current_run, key, value)
|
||||
|
||||
def set_cell_border(cell, **kwargs):
    """Draw the table-cell borders named in ``**kwargs``.

    Each of top/left/bottom/right may map to a dict with the XML border
    attributes ``val``, ``sz``, ``space`` and ``color`` (all optional —
    sensible defaults are applied).
    """
    tc_properties = cell._tc.get_or_add_tcPr()

    border_container = OxmlElement('w:tcBorders')

    for side in ["top", "left", "bottom", "right"]:
        spec = kwargs.get(side, None)
        if not spec:
            continue
        edge = OxmlElement(f"w:{side}")
        edge.set(qn("w:val"), spec.get("val", "single"))
        edge.set(qn("w:sz"), spec.get("sz", "4"))
        edge.set(qn("w:space"), spec.get("space", "0"))
        edge.set(qn("w:color"), spec.get("color", "000000"))
        border_container.append(edge)

    tc_properties.append(border_container)
|
||||
|
||||
def fill_template(doc_path, output_path, data, tickets):
    """Fill the Word invoice template with customer *data* and ticket rows,
    then save it to *output_path*.

    NOTE(review): this function reads the module-level globals
    ``customer_servicetime`` and ``customer_price`` that are only bound in
    the ``__main__`` block below — it raises NameError if imported and
    called standalone. Consider passing them as parameters.

    Parameters
    ----------
    doc_path : str
        Path of the .docx template containing ``{placeholder}`` markers.
    output_path : str
        Where the filled document is written.
    data : dict
        Placeholder -> value mapping; the monetary totals inside it are
        computed and overwritten here.
    tickets : list[tuple]
        Ticket rows for one customer (see fetch_tickets_from_database).
    """
    doc = Document(doc_path)
    print("Korrekt")
    # Replace placeholders in plain body paragraphs.
    for paragraph in doc.paragraphs:
        for key, value in data.items():
            if f"{{{key}}}" in paragraph.text:
                replace_text_in_paragraph(paragraph, key, value)
        if "{page_breaker}" in paragraph.text:
            # With 7+ tickets the table overflows the page, so force a
            # manual break; otherwise just strip the marker.
            if len(tickets) >= 7:
                paragraph.text = paragraph.text.replace("{page_breaker}", "")
                insert_page_break(paragraph)
            else:
                paragraph.text = paragraph.text.replace("{page_breaker}", "")

    # Replace placeholders in the first table (address/header block).
    first_table = doc.tables[0]
    for row in first_table.rows:
        for cell in row.cells:
            for paragraph in cell.paragraphs:
                for key, value in data.items():
                    replace_text_in_paragraph(paragraph, key, value)

    # Second table: append one row per ticket.
    second_table = doc.tables[1]
    for ticket in tickets:
        row = second_table.add_row()
        ticket_number, title, createdate, _, _, _, timerange, _, _, tags = ticket

        print(ticket[9])

        row.cells[0].text = ticket[0]  # ticket number
        if ticket[9]:
            row.cells[1].text = f"{ticket[1]} - {ticket[9]}"  # title plus tags
        else:
            row.cells[1].text = ticket[1]
        row.cells[2].text = ticket[2].strftime("%d.%m.%Y")  # createdate formatted as string
        row.cells[3].text = str(ticket[6])  # timerange (time_unit)

        # Set formatting and alignment for every cell of the new row.
        for idx, cell in enumerate(row.cells):
            for paragraph in cell.paragraphs:
                # Title column left-aligned, everything else centred.
                if idx == 1:
                    paragraph.alignment = WD_ALIGN_PARAGRAPH.LEFT
                else:
                    paragraph.alignment = WD_ALIGN_PARAGRAPH.CENTER

                for run in paragraph.runs:
                    run.font.name = 'Verdana'
                    run.font.size = Pt(10)
                    run.font.bold = False
            cell.vertical_alignment = WD_ALIGN_VERTICAL.CENTER
            set_cell_border(cell, top={"val": "single", "sz": "4", "color": "000000"},
                            bottom={"val": "single", "sz": "4", "color": "000000"},
                            left={"val": "single", "sz": "4", "color": "000000"},
                            right={"val": "single", "sz": "4", "color": "000000"})

    # Totals: sum of booked time units across all tickets.
    gesamt_time_unit = sum(int(ticket[6]) for ticket in tickets)
    data['gesamt_time_unit'] = gesamt_time_unit

    # Pricing is only filled in when booked time reaches the included
    # service time. NOTE(review): ``sl_time_unit`` *adds* the included time
    # while ``sl_minus_unit`` subtracts it — confirm the addition is
    # intended; also ``zl_time_unit`` is always 0 as seeded by the caller.
    if gesamt_time_unit >= customer_servicetime[0]:
        data['sl_time_unit'] = gesamt_time_unit + customer_servicetime[0]
        data['sl_minus_unit'] = gesamt_time_unit - customer_servicetime[0]
        data['time_unit_sum'] = gesamt_time_unit + data['zl_time_unit']
        data['price_per_minute'] = customer_price

        data['price_ex_mwst'] = gesamt_time_unit * data['price_per_minute']
        data['mwst_set'] = data['price_ex_mwst'] * 0.19  # 19% German VAT
        data['sum'] = data['price_ex_mwst'] + data['mwst_set']

        # German decimal notation (comma) for the rendered document.
        data['price_ex_mwst'] = f"{data['price_ex_mwst']:.2f}".replace(".",",")
        data['mwst_set'] = f"{data['mwst_set']:.2f}".replace(".",",")
        data['sum'] = f"{data['sum']:.2f}".replace(".",",")

    # Third table: totals block; the "sum" placeholder is rendered bold.
    third_table = doc.tables[2]
    for row in third_table.rows:
        for cell in row.cells:
            for paragraph in cell.paragraphs:
                for key, value in data.items():
                    if key == "sum":
                        # Set the text and make it bold.
                        for run in paragraph.runs:
                            if f"{{{key}}}" in run.text:
                                run.text = run.text.replace(f"{{{key}}}", str(value))
                                run.font.bold = True
                    else:
                        replace_text_in_paragraph(paragraph, key, value)

    doc.save(output_path)
|
||||
|
||||
# Main program: export one invoice document per customer with recently
# closed tickets.
if __name__ == "__main__":
    tickets = fetch_tickets_from_database()
    # NOTE(review): assumes all fetched tickets share a single service and
    # price — next(iter(set(...))) picks an arbitrary element when they
    # differ; confirm this invariant.
    service_ID = next(iter(set(ticket[8] for ticket in tickets)))
    price = next(iter(set(ticket[7] for ticket in tickets)))
    customer_ids = set(ticket[4] for ticket in tickets)

    print(tickets)
    print(customer_ids)

    for customer_id in customer_ids:
        customer_data = fetch_customer_data(customer_id)
        # NOTE(review): fill_template reads these two via module globals;
        # customer_servicetime may be None (TypeError on [0]) when the
        # customer has no servicetime row — confirm.
        customer_servicetime = fetch_customer_servicetime(customer_id)
        customer_price = fetch_customer_price(price, customer_id, service_ID)
        customer_tickets = [ticket for ticket in tickets if ticket[4] == customer_id]
        print(customer_data)

        if not customer_data:
            print(f"Keine Kundendaten für Kunden-ID {customer_id} gefunden!")
            continue

        # Billing period: starts on the customer's contract day (ticket
        # column 5) in the previous month and spans one month minus a day.
        day = customer_tickets[0][5]
        now = datetime.now()
        if now.month == 1:
            last_month = 12
            year = now.year - 1
        else:
            last_month = now.month - 1
            year = now.year
        startdate = datetime(year, last_month, day)
        enddate = startdate + relativedelta(months=1) - timedelta(days=1)

        # Template placeholder values; monetary fields are pre-seeded here
        # and overwritten inside fill_template.
        data = {
            "name": customer_data[0],
            "street": customer_data[1],
            "housenumber": customer_data[2],
            "postcode": customer_data[3],
            "city": customer_data[4],
            "cnumber": customer_data[5],
            "year": datetime.now().year,
            "onumber": "1",  # NOTE(review): invoice sequence hardcoded to "1"
            "startdate": startdate.strftime("%d.%m.%Y"),
            "enddate": enddate.strftime("%d.%m.%Y"),
            "today": datetime.now().strftime("%d.%m.%Y"),
            "price_per_minute": customer_price,
            "servicetime": customer_servicetime[0],
            "gesamt_time_unit": 0,
            "price_ex_mwst": "0,00",
            "sl_time_unit": 0,
            "sl_minus_unit": 0,
            "zl_time_unit": 0,
            "time_unit_sum": 0,
            "mwst_set": "0,00",
            "sum": "0,00",
        }
        output_path = f"apps/ticket_export/exports/RE2025.{customer_data[5]}.{data['onumber']}.docx"
        fill_template('apps/ticket_export/template.docx', output_path, data, customer_tickets)
        print("True")
|
||||
Binary file not shown.
|
|
@ -1,13 +0,0 @@
|
|||
services:
|
||||
streamlit:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: dockerfile
|
||||
ports:
|
||||
- 80:80
|
||||
networks:
|
||||
- frontend
|
||||
|
||||
networks:
|
||||
frontend:
|
||||
driver: bridge
|
||||
21
dockerfile
21
dockerfile
|
|
@ -1,21 +0,0 @@
|
|||
|
||||
FROM python:3.9-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
RUN apt-get update && apt-get install -y \
|
||||
build-essential \
|
||||
curl \
|
||||
software-properties-common \
|
||||
git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN git clone https://gitlab.stines.de/sebastian.serfling/Reports-Visual.git .
|
||||
|
||||
RUN pip3 install -r requirements.txt
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
HEALTHCHECK CMD curl --fail http://localhost:80/_stcore/health
|
||||
|
||||
ENTRYPOINT ["streamlit", "run", "app.py", "--server.port=80", "--server.address=0.0.0.0"]
|
||||
|
|
@ -0,0 +1,167 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import mysql.connector
|
||||
from datetime import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
|
||||
# Set Page Name
|
||||
st.set_page_config(page_title="Reporting")
|
||||
|
||||
|
||||
# Load custom CSS
|
||||
def load_css(file_name):
    """Inject the stylesheet at *file_name* into the Streamlit page."""
    with open(file_name) as css_file:
        st.markdown(f'<style>{css_file.read()}</style>', unsafe_allow_html=True)

load_css('style.css')
|
||||
|
||||
# Function to get filtered data from the database
|
||||
def get_filtered_data(customer_id, service_id, start_date, end_date):
    """Count distinct reporting users per month for one customer/service,
    excluding admin and test accounts.

    Parameters
    ----------
    customer_id, service_id : int
    start_date, end_date : str
        Inclusive range in ``YYYY-MM-DD`` form.

    Returns
    -------
    pandas.DataFrame
        Columns: month (``YYYY-MM``), count.
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    try:
        # Parameterised query (SQL injection fix). Because the driver uses
        # %s placeholders, every literal % (DATE_FORMAT patterns, LIKE
        # wildcards) must be doubled to %% when params are supplied.
        query = """
            SELECT DATE_FORMAT(sr.reportingdate, '%%Y-%%m') AS month,
                   COUNT(DISTINCT sr.username) as count
            FROM Kunden.`services.reporting` sr
            JOIN Kunden.services s ON sr.service_ID = s.service_ID
            WHERE sr.customer_ID = %s
              AND sr.service_ID = %s
              AND sr.username NOT LIKE '%%admin%%'
              AND sr.username NOT LIKE '%%test%%'
              AND sr.reportingdate BETWEEN %s AND %s
            GROUP BY DATE_FORMAT(sr.reportingdate, '%%Y-%%m')
            ORDER BY DATE_FORMAT(sr.reportingdate, '%%Y-%%m');
        """
        service_reporting = pd.read_sql_query(
            query, mydb,
            params=(customer_id, service_id, start_date, end_date),
        )
    finally:
        mydb.close()
    return service_reporting
|
||||
|
||||
|
||||
# Fetch initial data for default selections
|
||||
def get_initial_data():
    """Fetch the widget option data for the reporting page.

    Returns
    -------
    tuple[pandas.DataFrame, pandas.DataFrame, pandas.DataFrame]
        (services, customers, date_range) — distinct reported services,
        reporting customers, and the min/max reporting dates.
    """
    connection = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )

    # Distinct services that actually appear in the reporting table.
    service_ids = pd.read_sql_query(
        """
        SELECT DISTINCT s.service_ID, s.name
        FROM Kunden.`services.reporting` sr
        JOIN Kunden.services s ON sr.service_ID = s.service_ID
        """,
        connection,
    )

    # Customers that have reporting rows.
    customers = pd.read_sql_query(
        """
        SELECT c.customer_ID, c.customer, co.companyname
        FROM Kunden.company co
        JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
        JOIN Kunden.`services.reporting`sr ON sr.customer_ID = co.customer_ID
        GROUP BY c.customer_ID, c.customer, co.companyname;
        """,
        connection,
    )

    # Overall reporting date span.
    # NOTE(review): the range is anchored to customer_ID = 5 — confirm this
    # hardcoded customer is intentional.
    date_range = pd.read_sql_query(
        """
        SELECT MIN(reportingdate) AS min_date, MAX(reportingdate) AS max_date
        FROM Kunden.`services.reporting`
        WHERE customer_ID = 5
        """,
        connection,
    )
    connection.close()

    return service_ids, customers, date_range
|
||||
|
||||
|
||||
# Define page functions
|
||||
def home():
    """Render the landing page."""
    st.title("Home Page")
    st.write("Welcome to the Home Page!")
|
||||
|
||||
|
||||
def services_reporting():
    """Render the service-reporting page: customer/service/date-range filter
    widgets plus a monthly distinct-user bar chart for the selection."""
    st.title("Reporting :mag_right:")
    # Get initial data for widgets
    initial_service_ids, customers, initial_date_range = get_initial_data()
    # Combine service_ID and name for display
    service_options = initial_service_ids.apply(lambda row: f"{row['service_ID']} - {row['name']}", axis=1)

    # Add selection widget for customer ID
    selected_customer = st.selectbox(
        'Select Customer',
        customers.apply(lambda row: f"{row['customer_ID']} - {row['companyname']} - {row['customer']}", axis=1).tolist()
    )

    # Extract customer_ID from the selected label ("<id> - <company> - <customer>")
    selected_customer_id = int(selected_customer.split(' - ')[0])

    # Add selection widget for service ID
    selected_service = st.selectbox(
        'Select Service',
        service_options.tolist()
    )

    # Extract service_ID from selected option
    selected_service_id = int(selected_service.split(' - ')[0])

    # Convert date range to datetime objects
    min_date = initial_date_range['min_date'][0]
    max_date = initial_date_range['max_date'][0]

    # Add date range selection widget
    selected_date_range = st.date_input(
        'Select date range',
        value=[min_date, max_date],
        min_value=min_date,
        max_value=max_date
    )

    # Format the selected dates as 'YYYY-MM-DD'
    # NOTE(review): st.date_input returns a 1-tuple while the user is still
    # picking the second date — index [1] raises IndexError then; confirm.
    start_date_str = selected_date_range[0].strftime("%Y-%m-%d")
    end_date_str = selected_date_range[1].strftime("%Y-%m-%d")

    # Add a button to apply filters
    if st.button('Apply Filters'):
        # Fetch filtered data from the database
        filtered_data = get_filtered_data(selected_customer_id, selected_service_id, start_date_str, end_date_str)

        # Sort the data by month
        filtered_data = filtered_data.sort_values('month')

        # Create a bar chart with the filtered data
        if not filtered_data.empty:
            st.bar_chart(filtered_data.set_index('month')['count'])
        else:
            st.write("No data available for the selected filters.")
|
||||
|
||||
|
||||
|
||||
|
||||
# Default landing page on first load.
if 'page' not in st.session_state:
    st.session_state.page = 'Home'

# Sidebar navigation
st.sidebar.title("Navigation")
if st.sidebar.button('Home'):
    st.session_state.page = 'Home'
if st.sidebar.button('Services Reporting'):
    st.session_state.page = 'Services Reporting'

# Page display logic
if st.session_state.page == 'Home':
    home()
elif st.session_state.page == 'Services Reporting':
    services_reporting()
|
||||
|
|
@ -1,5 +0,0 @@
|
|||
mysql-connector-python~=9.0.0
|
||||
python-dotenv~=1.0.1
|
||||
python-dateutil~=2.9.0.post0
|
||||
streamlit~=1.37.1
|
||||
pandas~=2.2.2
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@ -1,94 +0,0 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import mysql.connector
|
||||
from datetime import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def get_filtered_users(customer_id, service_id):
    """Return today's rows from ``daily.user.online``, optionally narrowed
    to one customer and/or one service.

    Parameters
    ----------
    customer_id : int | None
        Customer filter; falsy values mean "all customers".
    service_id : int | None
        Service filter; falsy values mean "all services".

    Returns
    -------
    pandas.DataFrame
        Matching rows, newest first.
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    try:
        # Bound parameters (%s) instead of concatenating raw values into
        # the SQL string (injection fix); the stray debug print is removed.
        query = "SELECT * FROM Kunden.`daily.user.online` where DATE(timestamp) = CURDATE()"
        params = []
        if customer_id:
            query += " AND customer_ID = %s"
            params.append(customer_id)
        if service_id:
            query += " AND services_id = %s"
            params.append(service_id)
        query += " ORDER BY timestamp DESC"
        users = pd.read_sql_query(query, mydb, params=params if params else None)
    finally:
        mydb.close()
    return users
|
||||
|
||||
|
||||
def get_initial_data():
    """Fetch the widget option data for the user-filter page.

    Returns
    -------
    tuple[pandas.DataFrame, pandas.DataFrame]
        (services, customers) — all services, and all customers joined
        with their company names.
    """
    db = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )

    # All known services (ID + display name).
    service_ids = pd.read_sql_query(
        """
        SELECT DISTINCT s.service_ID, s.name
        FROM Kunden.services s
        """,
        db,
    )

    # All customers with their company names.
    customers = pd.read_sql_query(
        """
        SELECT DISTINCT c.customer_ID, c.customer, co.companyname
        FROM Kunden.company co
        JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
        """,
        db,
    )

    db.close()
    return service_ids, customers
|
||||
|
||||
|
||||
def user_filter():
    """Render the "User Filter" page: customer/service dropdowns plus a
    table of today's online users matching the selection."""
    st.title("User Filter :mag_right:")
    # Get initial data for widgets
    initial_service_ids, customers = get_initial_data()
    # Combine service_ID and name for display
    service_options = initial_service_ids.apply(lambda row: f"{row['service_ID']} - {row['name']}", axis=1)

    # Add selection widget for customer ID ("All" disables the filter)
    selected_customer = st.selectbox(
        'Select Customer',
        ["All"] + customers.apply(lambda row: f"{row['customer_ID']} - {row['companyname']} - {row['customer']}",
                                  axis=1).tolist()
    )

    # Extract customer_ID from selected option
    selected_customer_id = None if selected_customer == "All" else int(selected_customer.split(' - ')[0])

    # Add selection widget for service ID ("All" disables the filter)
    selected_service = st.selectbox(
        'Select Service',
        ["All"] + service_options.tolist()
    )

    # Extract service_ID from selected option
    selected_service_id = None if selected_service == "All" else int(selected_service.split(' - ')[0])

    # Add a button to apply filters
    if st.button('Apply Filters'):
        # Fetch filtered data from the database
        filtered_data = get_filtered_users(selected_customer_id, selected_service_id)

        # Display the filtered data
        if not filtered_data.empty:
            st.dataframe(filtered_data)
        else:
            st.write("No data available for the selected filters.")
|
||||
136
sites/server.py
136
sites/server.py
|
|
@ -1,136 +0,0 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import mysql.connector
|
||||
from datetime import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def get_filtered_server(customer_ids, service_id, service_status, os_type, reporting):
    """Return server inventory rows joined with CPU data, narrowed by the
    given optional filters.

    Parameters
    ----------
    customer_ids : list[int] | None
        Restrict to these customers (IN clause); falsy means all.
    service_id, service_status : int | None
        Optional equality filters; falsy means no filter.
    os_type : str | None
        Optional OS filter.
    reporting : str
        The literal string "True" restricts rows to licensed servers.

    Returns
    -------
    pandas.DataFrame
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    try:
        # Base query; filters are appended as parameterised clauses below.
        # The original interpolated raw values into the SQL (injection risk).
        query = """
            select s.hostname, s.privat_ipaddress, s.public_ipaddress, s.ram, s.createdate, s.disabledate, s.os, s.customer_ID, s.server_ID, hc.name, hc.core
            from Kunden.server s
            join Kunden.`hardware.cpu` hc ON hc.cpu_ID = s.cpu_ID
            WHERE 1=1
        """
        params = []

        # Multiple customers: one %s placeholder per ID in the IN clause.
        if customer_ids:
            placeholders = ", ".join(["%s"] * len(customer_ids))
            query += f" AND s.customer_ID IN ({placeholders})"
            params.extend(customer_ids)

        if service_id:
            query += " AND s.service_ID = %s"
            params.append(service_id)
        if service_status:
            query += " AND s.status = %s"
            params.append(service_status)
        if os_type:
            query += " AND s.os = %s"
            params.append(os_type)
        if reporting == "True":
            query += " AND licensekey IS NOT NULL"

        servers = pd.read_sql_query(query, mydb, params=params if params else None)
    finally:
        mydb.close()
    return servers
|
||||
|
||||
def get_initial_data():
    """Fetch the option lists for the server-filter widgets.

    Returns:
        (service_ids, customers): two DataFrames — distinct services
        (service_ID, name) and distinct customers (customer_ID, customer,
        companyname).
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    # Fetch unique service IDs and names
    service_id_query = """
    SELECT DISTINCT s.service_ID, s.name
    FROM Kunden.services s
    """
    service_ids = pd.read_sql_query(service_id_query, mydb)

    # Fetch customer information (company joined to its customer record)
    customer_query = """
    SELECT DISTINCT c.customer_ID, c.customer, co.companyname
    FROM Kunden.company co
    JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
    """
    customers = pd.read_sql_query(customer_query, mydb)

    mydb.close()
    return service_ids, customers
|
||||
|
||||
def server_filter():
    """Render the server-filter page: selection widgets, the filtered server
    table and the SPLA core-package summary."""
    st.title("Server Filter :mag_right:")
    # Get initial data for widgets
    initial_service_ids, customers = get_initial_data()
    # Combine service_ID and name for display ("<id> - <name>")
    service_options = initial_service_ids.apply(lambda row: f"{row['service_ID']} - {row['name']}", axis=1)

    # Map display label -> customer_ID; the widget shows only the label.
    customer_dict = {f"{row['companyname']} - {row['customer']}": row['customer_ID'] for _, row in customers.iterrows()}

    # Use multiselect for multiple customer selection
    selected_customers = st.multiselect(
        'Select Customer(s)',
        list(customer_dict.keys())  # Display only companyname and customer
    )

    # Get the corresponding customer IDs
    selected_customer_ids = [customer_dict[customer] for customer in selected_customers]

    # Add selection widget for service ID
    selected_service = st.selectbox(
        'Select Service',
        ["All"] + service_options.tolist()
    )

    # Extract service_ID from selected option ("All" -> no filter)
    selected_service_id = None if selected_service == "All" else int(selected_service.split(' - ')[0])

    # Add selection widget for service status
    selected_status = st.selectbox(
        'Select Service Status',
        ["All", "1 - Active", "0 - Inactive"]
    )

    # Extract status from selected option (leading digit of the label)
    service_status = None if selected_status == "All" else int(selected_status.split(' - ')[0])

    # SPLA (license-reporting) server selection
    reporting_box = st.selectbox(
        'Select SPLA Server',
        ["Nein", "Ja"]
    )

    # Extract reporting status ("Ja" -> the string "True", see get_filtered_server)
    reporting = None if reporting_box == "Nein" else "True"

    # Add OS type selection
    os_box = st.selectbox(
        'Select OS Type',
        ["All", "Linux", "Windows"]
    )

    # Extract OS type
    os_type = None if os_box == "All" else os_box

    # Add a button to apply filters
    if st.button('Apply Filters'):
        # Fetch filtered data from the database
        filtered_data = get_filtered_server(selected_customer_ids, selected_service_id, service_status, os_type, reporting)
        # Display the filtered data
        if not filtered_data.empty:
            st.dataframe(filtered_data)
            st.text(f"CPU SUMME = {sum(filtered_data['core'])}" )
            # NOTE(review): the package formula uses count() (number of rows),
            # not the core sum shown above — confirm this matches the SPLA contract.
            st.text(f"Berechung der Core-Pakete: Anzahl der Cores({filtered_data['core'].count()}) * Core-Pakete aus SPLA (8) / 2")
            # split('.')[0] strips the decimal part of the formatted float
            st.text(f"Reporting Core-Pakete = {(filtered_data['core'].count())*8/2}".split('.')[0])
        else:
            st.write("No data available for the selected filters.")
|
||||
|
|
@ -1,267 +0,0 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import mysql.connector
|
||||
from datetime import datetime, date, timedelta
|
||||
from sqlalchemy import create_engine
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
import altair as alt
|
||||
|
||||
load_dotenv()
|
||||
|
||||
def get_filtered_data(customer_id, service_id, start_date, end_date):
    """
    Fetches the user count data grouped by month within the specified date range.

    Returns a DataFrame with columns 'day' (actually a '%Y-%m' year-month
    string) and 'count'. There is no GROUP BY, so this assumes one stored
    count row per (customer, service, month) — TODO confirm table shape.
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)

    query = f"""
    SELECT DATE_FORMAT(add_date, '%Y-%m') AS day,
    count as count
    FROM Kunden.`daily.users.count_by_services`
    WHERE customer_ID = {customer_id}
    AND services_ID = {service_id}
    AND add_date BETWEEN '{start_date}' AND '{end_date}'
    ORDER BY DATE_FORMAT(add_date, '%Y-%m');
    """
    service_reporting = pd.read_sql_query(query, engine)
    # engine has no close(); its pool is released when the engine is GC'd
    return service_reporting
|
||||
|
||||
|
||||
def get_user_online(customer_id, service_id, start_date, end_date):
    """Return the distinct users seen online in the date range.

    Returns:
        (user_online, user_online_count): DataFrame with a single 'username'
        column, and its row count.
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)
    # Service 100 identifies its users by mail address, all others by username.
    if service_id == 100:
        user_info = "sr.primarymail"
    else:
        user_info = "sr.username"
    # GROUP BY de-duplicates users that appear on several days in the range.
    query = f"""
    SELECT
    {user_info} as username
    FROM Kunden.`daily.user.online` sr
    WHERE sr.customer_ID = {customer_id}
    AND sr.services_ID = {service_id}
    AND sr.timestamp BETWEEN '{start_date}' AND '{end_date}'
    GROUP BY {user_info}
    ORDER BY {user_info};
    """
    user_online = pd.read_sql_query(query, engine)
    user_online_count = user_online.shape[0]
    return user_online, user_online_count
|
||||
|
||||
def get_max_user_count(customer_id, service_id, start_date, end_date):
    """
    Fetches the maximum user count within the specified date range.

    Returns 0 when no data exists for the selection.

    NOTE(review): MAX(username) takes the lexicographically greatest value —
    confirm a COUNT(...) was not intended here.
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)
    query = f"""
    SELECT MAX(username) as max_count
    FROM Kunden.`daily.user.enabled` WHERE customer_ID = {customer_id} AND services_ID = {service_id} AND timestamp BETWEEN '{start_date}' AND '{end_date}'
    """

    max_user_count = pd.read_sql_query(query, engine)
    if max_user_count.empty:
        return 0
    value = max_user_count.iloc[0]['max_count']
    # BUG FIX: MAX() over zero matching rows yields one row containing NULL,
    # not an empty frame — the old code returned NaN/None in that case.
    return 0 if value is None or pd.isna(value) else value
|
||||
|
||||
def get_active_users(customer_id, service_id, start_date, end_date):
    """
    Fetch all active users for the given customer, service, and date range
    based on the most recent activity and status.

    Returns:
        (active_users, user_active_count): DataFrame with a 'username'
        column, and its row count.

    NOTE(review): the query selects rows with timestamp equal to start_date
    only; end_date is unused — confirm whether a BETWEEN range was intended.
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)
    # Service 100 identifies its users by mail address, all others by username.
    if service_id == 100:
        user_info = "sr.primarymail"
    else:
        user_info = "sr.username"
    query = f"""
    SELECT
    {user_info} as username
    FROM Kunden.`daily.user.enabled` sr
    WHERE sr.customer_ID = {customer_id}
    AND sr.services_ID = {service_id}
    AND sr.timestamp = '{start_date}'
    ORDER BY {user_info};
    """
    active_users = pd.read_sql_query(query, engine)
    user_active_count = active_users.shape[0]
    return active_users, user_active_count
|
||||
|
||||
def get_user_not_online(customer_id, service_id, start_date, end_date):
    """Return the distinct users recorded as not online in the date range.

    Returns:
        (not_active_users, user_not_online_count): DataFrame with a
        'username' column, and its row count.
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)
    # Service 100 identifies its users by mail address, all others by username.
    if service_id == 100:
        user_info = "primarymail"
    else:
        user_info = "username"
    query = f"""
    SELECT
    {user_info} as username
    FROM Kunden.`daily.user.notonline` sr
    WHERE sr.customer_ID = {customer_id}
    AND sr.services_ID = {service_id}
    AND sr.timestamp BETWEEN '{start_date}' AND '{end_date}'
    GROUP BY {user_info}
    ORDER BY {user_info};
    """
    not_active_users = pd.read_sql_query(query, engine)
    user_not_online_count = not_active_users.shape[0]
    return not_active_users, user_not_online_count
|
||||
|
||||
def get_initial_data():
    """Fetch the widget data for the reporting page.

    Returns:
        (service_ids, customers, date_range): services that have reporting
        rows, customers present in services.reporting, and the min/max
        reportingdate of that table (one-row DataFrame).
    """
    db_url = (
        f"mysql+mysqlconnector://{os.getenv('MYSQL_USER')}:"
        f"{os.getenv('MYSQL_PASSWORD')}@{os.getenv('MYSQL_HOST')}/"
        f"{os.getenv('MYSQL_DATABASE')}"
    )
    engine = create_engine(db_url)

    # Fetch unique service IDs and names (only services with reporting rows)
    service_id_query = """
    SELECT DISTINCT s.service_ID, s.name
    FROM Kunden.`services.reporting` sr
    JOIN Kunden.services s ON sr.service_ID = s.service_ID
    """
    service_ids = pd.read_sql_query(service_id_query, engine)

    # Fetch customer information (GROUP BY de-duplicates across reporting rows)
    customer_query = """
    SELECT c.customer_ID, c.customer, co.companyname
    FROM Kunden.company co
    JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
    JOIN Kunden.`services.reporting`sr ON sr.customer_ID = co.customer_ID
    GROUP BY c.customer_ID, c.customer, co.companyname;
    """
    customers = pd.read_sql_query(customer_query, engine)

    # Fetch date range covered by the reporting table
    date_query = """
    SELECT MIN(reportingdate) AS min_date, MAX(reportingdate) AS max_date
    FROM Kunden.`services.reporting`
    """
    date_range = pd.read_sql_query(date_query, engine)

    return service_ids, customers, date_range
|
||||
|
||||
def services_reporting():
    """Render the service-reporting page.

    The user picks a customer, a service and a date range; 'Apply Filters'
    then draws a bar chart of monthly user counts and side-by-side tables of
    enabled and online users for the selection.
    """
    st.title("Reporting :mag_right:")

    # Get initial data for widgets
    initial_service_ids, customers, initial_date_range = get_initial_data()
    service_options = initial_service_ids.apply(lambda row: f"{row['service_ID']} - {row['name']}", axis=1)

    # Map display label -> customer_ID; the selectbox shows only the label.
    customer_dict = {f"{row['companyname']} - {row['customer']}": row['customer_ID'] for _, row in customers.iterrows()}
    selected_customer = st.selectbox(
        'Select Customer',
        list(customer_dict.keys())
    )
    selected_customer_id = customer_dict[selected_customer]

    # Selection widget for service ("<id> - <name>" entries)
    selected_service = st.selectbox(
        'Select Service',
        service_options.tolist()
    )
    selected_service_id = int(selected_service.split(' - ')[0])

    # Default start: first day of the previous month; default end: newest
    # reporting date in the database. (Dead reads of the DB min/max into
    # min_date/max_date were removed — min_date was immediately overwritten.)
    min_date = (date.today().replace(day=1) - timedelta(days=1)).replace(day=1)
    start_date = st.date_input('Start Date', min_date)
    end_date = st.date_input('End Date', initial_date_range['max_date'][0])

    if st.button('Apply Filters'):
        # Fetch filtered data from the database
        filtered_data = get_filtered_data(selected_customer_id, selected_service_id, start_date, end_date)

        # BUG FIX: get_active_users returns (DataFrame, count); the previous
        # code bound the whole tuple to max_count, so the per-bar comparison
        # below could never match and no bar was ever highlighted red.
        _, max_count = get_active_users(selected_customer_id, selected_service_id, start_date, end_date)

        # Sort the data by month
        filtered_data = filtered_data.sort_values('day')

        if not filtered_data.empty:
            # Highlight months whose count equals the enabled-user count
            filtered_data['color'] = filtered_data['count'].apply(lambda x: 'red' if x == max_count else 'steelblue')

            # Bar chart of user counts per month
            bars = alt.Chart(filtered_data).mark_bar().encode(
                x='day:O',
                y='count:Q',
                color=alt.Color('color:N', scale=None, legend=None)
            )

            # Value labels rendered just above each bar
            text = bars.mark_text(
                align='center',
                baseline='middle',
                dy=-10
            ).encode(
                text='count:Q'
            )

            # Combine bars and text into a single chart
            chart = (bars + text).properties(
                title='User Enabled'
            )

            # Fetch the user lists for the side-by-side tables
            user_online, user_online_count = get_user_online(selected_customer_id, selected_service_id, start_date, end_date)
            active_users_data, user_active_count = get_active_users(selected_customer_id, selected_service_id, min_date, end_date)

            # Five equal columns; only col2/col3 are currently populated.
            col1, col2, col3, col4, col5 = st.columns([2, 2, 2, 2, 2])

            with col2:
                st.subheader(f"{selected_service.split(' - ')[1]} - Enabled Users")
                st.metric(label="1", label_visibility="hidden", value=user_active_count)
                st.data_editor(active_users_data, hide_index=True)

            with col3:
                st.subheader(f"{selected_service.split(' - ')[1]} - User Online")
                st.metric(label="1", label_visibility="hidden", value=user_online_count)
                st.data_editor(user_online, key=2, hide_index=True)

            st.altair_chart(chart, use_container_width=True)
        else:
            st.write("No data available for the selected filters.")
|
||||
152
sites/tickets.py
152
sites/tickets.py
|
|
@ -1,152 +0,0 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import subprocess
|
||||
import mysql.connector
|
||||
from datetime import datetime, timedelta
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
# Load DB credentials from .env into the environment before any connection.
load_dotenv()
total_time_unit = []  # NOTE(review): appears unused in this module — candidate for removal
|
||||
|
||||
def get_filtered_users(customer_id, start_date, end_date):
    """Return tickets closed within [start_date, end_date], optionally for one
    customer, with display-formatted dates, integer time units and an HTML
    'Link' column pointing at the ticket system."""
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )

    # Prepare the base query with date filter
    query = f"""
    SELECT number, title, createdate, time FROM Kunden.tickets
    WHERE closedate BETWEEN '{start_date}' AND '{end_date}'
    """
    if customer_id:
        query += f" AND customer_ID = {customer_id}"
    query += " ORDER BY createdate"

    users = pd.read_sql_query(query, mydb)
    mydb.close()

    # Format datetime columns as DD.MM.YYYY display strings
    for column in users.select_dtypes(include=['datetime64[ns]', 'datetime64[ns, UTC]']).columns:
        users[column] = users[column].dt.strftime('%d.%m.%Y')

    # Convert 'time' (column index 3) to integer by stripping the decimal part.
    # Assumes the column arrives as strings like "2.0" with no NULLs — TODO confirm.
    users.iloc[:, 3] = users.iloc[:, 3].str.split('.').str[0]
    users['time'] = users['time'].astype(int)

    # Create hyperlink for ticket number (rendered via to_html(escape=False))
    base_url = "https://ticket.stines.de/#ticket/zoom/number/"
    users['Link'] = users['number'].apply(lambda x: f'<a href="{base_url}{x}" target="_blank">{x}</a>')

    return users
|
||||
|
||||
def get_initial_data():
    """Fetch the option lists for the ticket-filter widgets.

    Returns:
        (service_ids, customers): distinct services and distinct customers.
        NOTE(review): the caller currently uses only the customers frame.
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    # Fetch unique service IDs and names
    service_id_query = """
    SELECT DISTINCT s.service_ID, s.name
    FROM Kunden.services s
    """
    service_ids = pd.read_sql_query(service_id_query, mydb)

    # Fetch customer information
    customer_query = """
    SELECT DISTINCT c.customer_ID, c.customer, co.companyname
    FROM Kunden.company co
    JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
    """
    customers = pd.read_sql_query(customer_query, mydb)

    mydb.close()
    return service_ids, customers
|
||||
|
||||
def run_script_and_list_documents():
    """Run the ticket-export script and offer generated .docx files for download.

    Side effects: executes apps/ticket_export/main.py as a subprocess, shows
    its output (or error) in the page, then lists every .docx found in the
    exports directory with a download button each.
    """
    output_area = st.empty()  # placeholder for the script's output / errors
    document_paths = []  # generated document paths

    # Directory where the export script writes its documents
    working_directory = "apps/ticket_export/exports"

    # Run the export script
    result = subprocess.run(
        ["python3", "apps/ticket_export/main.py"],
        capture_output=True,
        text=True
    )

    # BUG FIX: the captured result (and the st.empty placeholder) were never
    # used, so export failures were completely invisible to the user.
    if result.returncode != 0:
        output_area.error(f"Export script failed (exit {result.returncode}):\n{result.stderr}")
    elif result.stdout:
        output_area.text(result.stdout)

    # Collect generated documents from the working directory
    for filename in os.listdir(working_directory):
        if filename.endswith(".docx"):
            document_paths.append(os.path.join(working_directory, filename))

    # Display the list of generated documents and offer them for download
    if document_paths:
        st.write("Generated Documents:")
        for doc_path in document_paths:
            with open(doc_path, "rb") as file:
                st.download_button(
                    label=f"Download {os.path.basename(doc_path)}",
                    data=file,
                    file_name=os.path.basename(doc_path),
                    mime="application/vnd.openxmlformats-officedocument.wordprocessingml.document"
                )
    else:
        st.write("No documents found.")
|
||||
|
||||
def ticket_filter():
    """Render the ticket-filter page: customer + date-range widgets, invoice
    export trigger, result table and CSV download."""
    st.title("Ticket Filter :mag_right:")
    # Get initial data for widgets
    initial_service_ids, customers = get_initial_data()

    # Add selection widget for customer ID
    selected_customer = st.selectbox(
        'Select Customer',
        ["All"] + customers.apply(lambda row: f"{row['customer_ID']} - {row['companyname']} - {row['customer']}",
                                  axis=1).tolist()
    )
    # Extract customer_ID from selected option ("All" -> no filter)
    selected_customer_id = None if selected_customer == "All" else int(selected_customer.split(' - ')[0])

    # Date range picker; defaults to the last 30 days
    start_date = st.date_input("Start Date", datetime.now() - timedelta(days=30))
    end_date = st.date_input("End Date", datetime.now())

    if st.button("Create Invoice"):
        st.write("Running the script...")
        run_script_and_list_documents()

    # Add a button to apply filters
    if st.button('Apply Filters'):
        if start_date > end_date:
            st.error("Error: End date must be greater than or equal to start date.")
        else:
            # Fetch filtered data from the database
            filtered_data = get_filtered_users(selected_customer_id, start_date, end_date)
            if not filtered_data.empty:
                # Rendered as raw HTML so the ticket links stay clickable
                st.markdown(filtered_data.to_html(escape=False), unsafe_allow_html=True)
                # Convert DataFrame to CSV, excluding the HTML link column
                csv = filtered_data.drop(columns=['Link']).to_csv(index=False)

                st.write(f"Total Time Unit: {filtered_data['time'].sum()}")

                # Create a download button with a custom file name
                st.download_button(
                    label="Download CSV",
                    data=csv,
                    file_name=f"filtered_data_{selected_customer}.csv",  # Custom file name
                    mime='text/csv',
                )
            else:
                st.write("No data available for the selected filters.")
|
||||
|
||||
# Allow running this page standalone for debugging (normally imported by app.py).
if __name__ == "__main__":
    ticket_filter()
|
||||
|
|
@ -1,116 +0,0 @@
|
|||
import streamlit as st
|
||||
import pandas as pd
|
||||
import mysql.connector
|
||||
from datetime import datetime
|
||||
import os
|
||||
from dotenv import load_dotenv
|
||||
|
||||
load_dotenv()
|
||||
|
||||
|
||||
def get_filtered_users(customer_id, service_id, service_status):
    """Return each user's latest status row joined with user, service and
    company details.

    Args:
        customer_id: customer_ID to filter on, or None for all customers.
        service_id: service_ID to filter on, or None for all services.
        service_status: 0/1 service status, or None for all.

    Returns:
        pandas.DataFrame ordered by service status (descending).
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )

    # The subquery picks each user's most recent status timestamp; the outer
    # join restricts users.status to exactly those latest rows.
    query = f"""
    SELECT us.user_id, u.username, s.service_ID, uss.status, uss.timestamp, c.companyname
    FROM Kunden.`users.status` us
    JOIN (
    SELECT user_id, MAX(timestamp) AS latest_timestamp
    FROM Kunden.`users.status`
    GROUP BY user_id
    ) latest ON us.user_id = latest.user_id AND us.timestamp = latest.latest_timestamp
    JOIN Kunden.users u ON u.user_ID = us.user_id
    JOIN Kunden.`users.services` uss ON us.user_id = uss.user_id
    JOIN Kunden.services s ON uss.service_ID = s.service_ID
    JOIN Kunden.company c ON c.customer_ID = us.customer_ID
    WHERE 1=1
    """
    # Filter values come from fixed UI widgets (ints), interpolated directly.
    if customer_id is not None:
        query += f" AND us.customer_ID = {customer_id}"
    if service_id is not None:
        query += f" AND s.service_ID = {service_id}"
    # BUG FIX: compare against None explicitly — a plain truthiness test
    # dropped status 0 ("Inactive"), making that filter behave like "All".
    if service_status is not None:
        query += f" AND uss.status = {service_status}"
    query += " ORDER BY uss.status DESC"

    users = pd.read_sql_query(query, mydb)
    mydb.close()
    return users
|
||||
|
||||
|
||||
def get_initial_data():
    """Fetch the option lists for the user-filter widgets.

    Returns:
        (service_ids, customers): distinct services (service_ID, name) and
        distinct customers (customer_ID, customer, companyname).
    """
    mydb = mysql.connector.connect(
        host=os.getenv("MYSQL_HOST"),
        user=os.getenv("MYSQL_USER"),
        password=os.getenv("MYSQL_PASSWORD"),
        database=os.getenv("MYSQL_DATABASE")
    )
    # Fetch unique service IDs and names
    service_id_query = """
    SELECT DISTINCT s.service_ID, s.name
    FROM Kunden.services s
    """
    service_ids = pd.read_sql_query(service_id_query, mydb)

    # Fetch customer information
    customer_query = """
    SELECT DISTINCT c.customer_ID, c.customer, co.companyname
    FROM Kunden.company co
    JOIN Kunden.customers c ON co.customer_ID = c.customer_ID
    """
    customers = pd.read_sql_query(customer_query, mydb)

    mydb.close()
    return service_ids, customers
|
||||
|
||||
|
||||
def user_filter():
    """Render the user-filter page: customer/service/status widgets and the
    filtered user table."""
    st.title("User Filter :mag_right:")
    # Get initial data for widgets
    initial_service_ids, customers = get_initial_data()
    # Combine service_ID and name for display ("<id> - <name>")
    service_options = initial_service_ids.apply(lambda row: f"{row['service_ID']} - {row['name']}", axis=1)

    # Add selection widget for customer ID
    selected_customer = st.selectbox(
        'Select Customer',
        ["All"] + customers.apply(lambda row: f"{row['customer_ID']} - {row['companyname']} - {row['customer']}",
                                  axis=1).tolist()
    )

    # Extract customer_ID from selected option ("All" -> no filter)
    selected_customer_id = None if selected_customer == "All" else int(selected_customer.split(' - ')[0])

    # Add selection widget for service ID
    selected_service = st.selectbox(
        'Select Service',
        ["All"] + service_options.tolist()
    )

    # Extract service_ID from selected option
    selected_service_id = None if selected_service == "All" else int(selected_service.split(' - ')[0])

    # Add selection widget for service status
    selected_status = st.selectbox(
        'Select Service Status',
        ["All", "1 - Active", "0 - Inactive"]
    )

    # Extract status from selected option (leading digit of the label)
    service_status = None if selected_status == "All" else int(selected_status.split(' - ')[0])

    # Add a button to apply filters
    if st.button('Apply Filters'):
        # Fetch filtered data from the database
        filtered_data = get_filtered_users(selected_customer_id, selected_service_id, service_status)

        # Display the filtered data
        if not filtered_data.empty:
            st.dataframe(filtered_data)
        else:
            st.write("No data available for the selected filters.")
|
||||
|
|
@ -1 +0,0 @@
|
|||
# Serve the Streamlit app on all interfaces, port 80.
streamlit run app.py --server.port=80 --server.address=0.0.0.0
|
||||
16
style.css
16
style.css
|
|
@ -32,20 +32,4 @@
|
|||
}
|
||||
/* Accent border colour for Streamlit input widgets. */
.st-d7 , .st-d6 ,.st-d5 ,.st-d4 {
border-color: #fcbb2e;
}

/* Sidebar container: drop default padding, keep a left inset.
   NOTE(review): st-emotion-cache-* class names are build-specific and may
   break on a Streamlit upgrade — re-check after updating. */
.st-emotion-cache-1gwvy71{
padding: 0;
padding-left: 30px;
}
/* Sidebar nav buttons: full width, left-aligned, borderless. */
.e1f1d6gn4 > .stButton > button{
width: 100%;
justify-content: left;
border: 0;
}
/* Keep the translucent secondary-button background on focus/active. */
[data-testid="baseButton-secondary"]:focus{
background-color: #f4f3f366 !important;
}
[data-testid="baseButton-secondary"]:active{
background-color: #f4f3f366!important;
}
|
||||
Loading…
Reference in New Issue