master
Sebastian Serfling 2023-08-11 09:06:14 +02:00
commit 1a7e112df3
22 changed files with 715 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@ -0,0 +1,2 @@
/build/
*.spec

3
.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

View File

@ -0,0 +1,6 @@
<component name="InspectionProjectProfileManager">
<settings>
<option name="USE_PROJECT_PROFILE" value="false" />
<version value="1.0" />
</settings>
</component>

4
.idea/misc.xml Normal file
View File

@ -0,0 +1,4 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.9 (server-info) (2)" project-jdk-type="Python SDK" />
</project>

8
.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/server-info.iml" filepath="$PROJECT_DIR$/.idea/server-info.iml" />
</modules>
</component>
</project>

10
.idea/server-info.iml Normal file
View File

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="PYTHON_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<excludeFolder url="file://$MODULE_DIR$/venv" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

0
Controller/__init__.py Normal file
View File

View File

@ -0,0 +1,91 @@
import subprocess
import csv
from datetime import datetime
from Controller import mysql_connect
def adcontroller (ip,name):
    """Export Active Directory users into the customer schema *name*.

    Step 1: dump all AD users (all properties) to users.csv via PowerShell
    and mirror them into the `Active-Directory-User` table.
    Step 2: dump the members of the G-RDP-User group to group.csv and
    upsert them into `Active-Directory-RDS-User` (update when the
    SamAccountName is already known, insert otherwise).

    *ip* is unused; it is kept for the common dispatcher signature in
    main.py.  Requires the script to run on a domain-joined Windows host
    with the AD PowerShell module available.
    """
    # Export every AD user to a local CSV file.
    command = ["powershell", "-Command",
        "Get-ADUser -Filter * -Properties * | Export-Csv -NoTypeInformation -Encoding UTF8 -Path 'users.csv'"]
    subprocess.run(command)
    # Read only the CSV header to learn the column names.
    with open("users.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.DictReader(file)
        fieldnames = reader.fieldnames
    # Create the target table if missing: every AD property as TEXT plus
    # an import timestamp.  NOTE(review): column names come straight from
    # the CSV header and are only protected by backticks, not validated.
    table_name = "Active-Directory-User"
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY, importdate BIGINT(11), "
    for field in fieldnames:
        create_table_query += f"`{field}` TEXT, "
    create_table_query = create_table_query.rstrip(", ") + ")"
    mysql_connect.create_database(create_table_query, name)
    # Insert every data row; empty cells are stored as "-".
    with open("users.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.reader(file)
        next(reader)  # skip the header row
        for row in reader:
            row = [cell if cell.strip() else "-" for cell in row]
            unix_time = int(datetime.now().timestamp())
            print(unix_time)
            row = [unix_time] + row
            insert_query = f"INSERT INTO `{table_name}` (importdate, `{'`, `'.join(fieldnames)}`) VALUES (%s, {', '.join(['%s'] * len(fieldnames))})"
            mysql_connect.add_user(insert_query, name, row)
    # Second pass: members of the G-RDP-User group.
    command = ["powershell", "-Command",
        "Get-ADGroupMember -Identity G-RDP-User | Export-Csv -NoTypeInformation -Encoding UTF8 -Path 'group.csv'"]
    subprocess.run(command)
    # Read the group CSV header.
    with open("group.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.DictReader(file)
        fieldnames = reader.fieldnames
    table_name = "Active-Directory-RDS-User"
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY, importdate BIGINT(11), "
    for field in fieldnames:
        create_table_query += f"`{field}` TEXT, "
    create_table_query = create_table_query.rstrip(", ") + ")"
    mysql_connect.create_database(create_table_query, name)
    # Upsert the group members: update rows matched on SamAccountName,
    # insert new ones.
    with open("group.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.DictReader(file)
        for row in reader:
            sam_account_name = row["SamAccountName"]
            # Look up the row id for this account, if already imported.
            query_id = f"SELECT id FROM `{table_name}` WHERE SamAccountName = %s"
            result = mysql_connect.get_user(query_id, name, (sam_account_name,))
            if result:
                user_id = result[0]
                print(user_id)
                update_query = f"UPDATE `{table_name}` SET "
                for field in fieldnames:
                    if field != "SamAccountName":  # the match column is not rewritten
                        update_query += f"`{field}` = %s, "
                update_query = update_query.rstrip(", ")
                update_query += ", `importdate` = %s"  # refresh the unix timestamp
                update_query += " WHERE id = %s"
                # Values in SET-clause order, then the timestamp, then the
                # id for the WHERE clause.
                row_data = [row[field] for field in fieldnames if field != "SamAccountName"]
                unix_time = int(datetime.now().timestamp())
                row_data = row_data + [unix_time]
                row_data.append(user_id)
                print(row_data)
                mysql_connect.add_user(update_query, name, row_data)
            else:
                insert_query = f"INSERT INTO `{table_name}` ("
                # NOTE(review): unlike the first import these column names
                # are not backticked — confirm none of them needs quoting.
                insert_query += ", ".join(fieldnames)
                insert_query += ", `importdate`) VALUES ("
                insert_query += ", ".join(["%s"] * (len(fieldnames) + 1))  # extra %s for importdate
                insert_query += ")"
                row_data = [row[field] for field in fieldnames]
                unix_time = int(datetime.now().timestamp())
                row_data = row_data + [unix_time]
                print(row_data)
                mysql_connect.add_user(insert_query, name, row_data)
    # connection.commit()

View File

@ -0,0 +1,50 @@
import subprocess
import csv
from datetime import datetime
from Controller import mysql_connect
def exchange (ip,name):
    """Export all Exchange mailboxes into the `Exchange-User` table of *name*.

    Runs Get-Mailbox through the Exchange PowerShell snap-in, exports a
    fixed property list to exuser.csv, then imports every row.  The CSV
    column 'Id' is renamed to 'ADUserPath' in the database because `id`
    is already the AUTO_INCREMENT primary key.

    *ip* is unused; kept for the dispatcher signature in main.py.
    """
    exchange_commands = "Add-PSSnapin Microsoft.Exchange.Management.PowerShell.SnapIn; Get-Mailbox | Select-Object PSComputerName, RunspaceId, PSShowComputerName, Database, UseDatabaseRetentionDefaults, RetainDeletedItemsUntilBackup, IsHierarchyReady, IsHierarchySyncEnabled,RetentionPolicy, ExchangeGuid, AdminDisplayVersion, ExchangeUserAccountControl, IsMailboxEnabled,ProhibitSendQuota, ProhibitSendReceiveQuota, RecoverableItemsQuota, RecoverableItemsWarningQuota,CalendarLoggingQuota, IsResource, IsLinked, IsShared, IsRootPublicFolderMailbox, LinkedMasterAccount, ResetPasswordOnNextLogon,ResourceType, RoomMailboxAccountEnabled, SamAccountName, ServerLegacyDN, UseDatabaseQuotaDefaults,IssueWarningQuota, RulesQuota, UserPrincipalName, RoleAssignmentPolicy, EffectivePublicFolderMailbox, SharingPolicy,ArchiveQuota, ArchiveWarningQuota, DisabledMailboxLocations, CalendarVersionStoreDisabled, AuditEnabled,AuditLogAgeLimit, WhenMailboxCreated, AccountDisabled, Alias, OrganizationalUnit, DisplayName,LegacyExchangeDN, MaxSendSize, MaxReceiveSize, EmailAddressPolicyEnabled, PrimarySmtpAddress,RecipientType, RecipientTypeDetails, WindowsEmailAddress, Identity, IsValid, ExchangeVersion,DistinguishedName, Guid, ObjectCategory, WhenChanged, WhenCreated, WhenChangedUTC, WhenCreatedUTC,OrganizationId, Id, OriginatingServer | Export-Csv -Path 'exuser.csv' -NoTypeInformation -Encoding UTF8"
    # Run PowerShell as a subprocess and wait for the export to finish.
    process = subprocess.Popen(["powershell", "-Command", exchange_commands], stdout=subprocess.PIPE, shell=True)
    process.communicate()
    # Read only the CSV header to learn the column names.
    with open("exuser.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.DictReader(file)
        fieldnames = reader.fieldnames
    # Create the table if missing; every mailbox property is TEXT.
    table_name = "Exchange-User"
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY, importdate INT(11), "
    for field in fieldnames:
        if "Id" == field:
            # Rename the mailbox 'Id' column: `id` is already the PK.
            field = "ADUserPath"
        create_table_query += f"`{field}` TEXT, "
    create_table_query = create_table_query.rstrip(", ") + ")"
    # DYNAMIC row format: the many TEXT columns exceed the default row size.
    create_table_query += " ROW_FORMAT=DYNAMIC"
    row_length = len(create_table_query)
    print(row_length)
    print(create_table_query)
    mysql_connect.create_database(create_table_query, name)
    # Import every data row; empty cells become "-".
    with open("exuser.csv", "r", encoding='utf-8-sig') as file:
        reader = csv.reader(file)
        next(reader)  # skip the header row
        for row in reader:
            print(row)
            # NOTE(review): index 65 assumes 'Id' is the 66th property in
            # the Select-Object list above — confirm if the list changes.
            if fieldnames[65] == 'Id':
                fieldnames[65] = "ADUserPath"
            print(fieldnames[65])
            row = [cell if cell.strip() else "-" for cell in row]
            unix_time = int(datetime.now().timestamp())
            row = [unix_time] + row
            insert_query = f"INSERT INTO `{table_name}` (importdate, `{'`, `'.join(fieldnames)}`) VALUES (%s, {', '.join(['%s'] * len(fieldnames))})"
            mysql_connect.add_user(insert_query, name, row)

View File

@ -0,0 +1,86 @@
import mysql.connector.locales.eng
import mysql.connector
from sshtunnel import SSHTunnelForwarder
import os
def ping(host):
    """Decide how to reach the SSH jump host.

    Pings *host* once; if it answers we assume we are inside the LAN and
    return the internal address, otherwise the public forwarder.

    Returns a ``(hostname, port)`` tuple.
    """
    # BUG FIX: ``-c`` is the Unix count flag; Windows ping uses ``-n``.
    # On the Windows hosts this runs on, the old command always failed,
    # so the internal address was never chosen.
    count_flag = "-n" if os.name == "nt" else "-c"
    result = os.system(f"ping {count_flag} 1 {host}")
    if result == 0:
        hostname = "172.17.1.21", 22
    else:
        hostname = "forward.stines.de", 2223
    return hostname
# Shared SSH tunnel used by database(): the MySQL server is only reachable
# through this jump host (local port -> remote 127.0.0.1:3306).
# NOTE(review): hard-coded root SSH credentials in source control — move
# them to environment variables / a secrets store and rotate the password.
server = SSHTunnelForwarder(
    ("forward.stines.de", 2223),
    ssh_username="root",
    ssh_password="adm.3dfx12",
    remote_bind_address=('127.0.0.1', 3306)
)
def database(query, name, user):
    """Open the SSH tunnel and a MySQL connection, run *query*, return rows.

    query -- SQL text, optionally containing %s placeholders.
    name  -- schema (database) name to connect to; may be "".
    user  -- placeholder values.  Callers sometimes pass "" or a plain
             string when the query has no placeholders, so only genuine
             sequences/dicts are forwarded to the driver.

    Returns fetchone() for parameterized SELECTs, fetchall() for other
    SELECT and SHOW queries, and None for INSERT/UPDATE.
    """
    # Treat anything that is not a real parameter collection as "no
    # parameters" (fixes driver errors when a bare string was passed).
    params = user if isinstance(user, (list, tuple, dict)) and user else None
    server.start()
    print(f"SSH Server start Port:{server.local_bind_port}")
    # NOTE(review): hard-coded DB credentials in source control.
    mydb = mysql.connector.connect(
        host="127.0.0.1",
        port=server.local_bind_port,
        user="root",
        password="N53yBCswuawzBzS445VNAhWVMs3N59Gb9szEsrzXRBzarDqpdETpQeyt5v5CGe",
        database="" + name,
        auth_plugin='mysql_native_password',
    )
    try:
        mydb.connect()
        print("SQL Server Connect")
        cursor = mydb.cursor()
        if "SELECT" in query:
            cursor.execute(query, params)
            if params:
                return cursor.fetchone()
            print("Inside SELECT ALL")
            return cursor.fetchall()
        if "SHOW" in query:
            cursor.execute(query)
            return cursor.fetchall()
        if "INSERT" in query or "UPDATE" in query:
            # BUG FIX: the placeholder values were never handed to
            # execute(), so parameterized INSERT/UPDATE could not work.
            cursor.execute(query, params)
            mydb.commit()
    finally:
        # BUG FIX: the SELECT/SHOW paths returned without closing the
        # connection or stopping the tunnel (resource leak).
        mydb.close()
        server.stop()
def get_ip(query):
    """Run an unparameterized server-lookup query and return all rows."""
    print("GetIP")
    # Empty schema name and empty parameter set: the query is expected to
    # be fully qualified (e.g. `Stines-GmbH`.`Kunden-Server`).
    return database(query, "", "")
def get_database():
    """Return the list of schemas on the server (``SHOW DATABASES``).

    Each element is a one-tuple holding a schema name.
    """
    query = 'SHOW DATABASES'
    # Run the query once and reuse the result; the old code opened a
    # second SSH tunnel + connection just to print it.
    result = database(query, "", "")
    print(result)
    return result
def create_database(query, name):
    """Execute a DDL statement against schema *name*.

    Returns the status string "Database Created" on success; errors are
    printed and swallowed (the caller receives None).
    """
    try:
        database(query, name, "")
    except Exception as e:
        print("Ein Fehler ist aufgetreten:", str(e))
        return None
    return ("Database Created")
def add_user(query, name, user):
    """Run an INSERT/UPDATE with parameter values *user* against *name*.

    Returns "User Added" on success; errors are printed and swallowed.
    """
    try:
        database(query, name, user)
    except Exception as e:
        print("Ein Fehler ist aufgetreten:", str(e))
        return None
    return ("User Added")
def get_user(query, name, user):
    """Parameterized SELECT helper; returns the first matching row."""
    result = database(query, name, user)
    return result
def get_cpu(query, name, cpu):
    """Look up CPU data; *cpu* is forwarded as the query parameter set."""
    return database(query=query, name=name, user=cpu)
def update_cpu(query, name, cpu):
    """Run a hardware-info UPDATE; *cpu* is forwarded as the parameter set."""
    return database(query=query, name=name, user=cpu)

36
Controller/rds_export.py Normal file
View File

@ -0,0 +1,36 @@
import subprocess
from datetime import datetime
from Controller import mysql_connect
def rds (ip,name):
    """Snapshot the currently active RDS sessions into `RDS-Online-User`.

    Parses the output of ``quser`` (filtered for the German state word
    'Aktiv') and inserts one row per logged-in user with an import
    timestamp.  *ip* is unused; kept for the dispatcher signature.

    NOTE(review): this depends on a German Windows locale and on the
    column layout of quser — both are fragile; verify on the target hosts.
    """
    powershell_command = "quser | Select-String -Pattern 'Aktiv'"
    result = subprocess.run(["powershell", "-Command", powershell_command], capture_output=True, text=True)
    fieldname = ["Benutzername","Anmeldezeit","Anmeldedatum"]
    # Drop the first output line.  NOTE(review): Select-String output
    # normally has no header line — confirm this does not skip a user.
    output_lines = result.stdout.strip().split('\n')[1:]
    logged_in_users = []
    # Create the target table if it does not exist yet.
    table_name = "RDS-Online-User"
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY, importdate INT(11), "
    for field in fieldname:
        create_table_query += f"`{field}` TEXT, "
    create_table_query = create_table_query.rstrip(", ") + ")"
    create_table_query += " ROW_FORMAT=DYNAMIC"
    user = ""
    mysql_connect.create_database(create_table_query, name)
    # Parse each session line.  Fields 5 and 6 are assumed to be the
    # logon time and date in the German quser layout — TODO confirm.
    for line in output_lines:
        parts = line.split()
        username = parts[0].replace(">","")  # ">" marks the current session
        login_time = parts[5]
        login_date = parts[6]
        # NOTE(review): tuple order is (user, date, time) while the column
        # list is (Benutzername, Anmeldezeit, Anmeldedatum) — time and
        # date appear to land in swapped columns; verify against the DB.
        logged_in_users.append((username,login_date, login_time))
    # Insert one row per session; empty cells become "-".
    for user in logged_in_users:
        user = [cell if cell.strip() else "-" for cell in user]
        unix_time = int(datetime.now().timestamp())
        user = [unix_time] + user
        insert_query = f"INSERT INTO `{table_name}` (importdate, `{'`, `'.join(fieldname)}`) VALUES (%s, {', '.join(['%s'] * len(fieldname))})"
        mysql_connect.add_user(insert_query, name, user)

View File

@ -0,0 +1,27 @@
import win32evtlog
def read_backup_logs():
    """Collect Windows-Backup events from the Application event log.

    Returns a list of ``{'TimeGenerated', 'Message'}`` dicts, newest
    first (EVENTLOG_BACKWARDS_READ).
    """
    log_type = 'Application'  # event log to open ('Application', 'System', ...)
    log_source = 'Microsoft-Windows-Backup'  # source we filter for
    handle = win32evtlog.OpenEventLog(None, log_type)
    flags = win32evtlog.EVENTLOG_BACKWARDS_READ | win32evtlog.EVENTLOG_SEQUENTIAL_READ
    backup_events = []
    try:
        # BUG FIX: ReadEventLog returns the log in chunks; the old code
        # read only the first chunk and silently missed older events.
        while True:
            events = win32evtlog.ReadEventLog(handle, flags, 0)
            if not events:
                break
            for event in events:
                if event.SourceName == log_source:
                    inserts = event.StringInserts
                    backup_events.append({
                        'TimeGenerated': event.TimeGenerated.Format(),
                        # Some events carry no insert strings; avoid a
                        # TypeError/IndexError on None or empty tuples.
                        'Message': inserts[0] if inserts else '',
                    })
    finally:
        win32evtlog.CloseEventLog(handle)
    return backup_events
if __name__ == "__main__":
    # Manual test entry point: dump every collected backup event to stdout.
    for event in read_backup_logs():
        print(f"Time: {event['TimeGenerated']}\nMessage: {event['Message']}\n")

65
Controller/smtp_export.py Normal file
View File

@ -0,0 +1,65 @@
import requests
from datetime import datetime
from Controller import mysql_connect
def smtp(ip, name, mailcow_token):
    """Import all mailcow mailboxes into the `SMTP-User` table of *name*.

    Fetches ``/api/v1/get/mailbox/all`` from the mailcow host at *ip*
    (authenticated with *mailcow_token*), flattens the nested
    'attributes' and 'rl' objects, and inserts one row per mailbox with
    an import timestamp.
    """
    mailcow_url = f"http://{ip}/api/v1"
    mailcow_token = f"{mailcow_token}"
    response = requests.get(f"{mailcow_url}/get/mailbox/all", headers={"X-API-Key": mailcow_token})
    json_data = response.json()
    # BUG FIX: an empty mailbox list crashed below on json_data[0].
    if not json_data:
        print("SMTP: no mailboxes returned")
        return
    # Ensure every entry has the rate-limit columns, defaulted if absent.
    defaults = {'frame': 's', 'value': '10'}
    for entry in json_data:
        for column, default in defaults.items():
            if column not in entry:
                entry[column] = default
    # Flatten the nested 'attributes' object into the entry itself.
    for entry in json_data:
        attributes = entry.pop('attributes', None)
        if attributes:
            entry.update(attributes)
    # Flatten the nested 'rl' (rate limit) object as well.
    for entry in json_data:
        rl = entry.pop('rl', None)
        if rl:
            entry.update(rl)
        print(entry)
    # Replace empty cells with "-" so nothing is stored as NULL/''.
    for entry in json_data:
        for key, value in entry.items():
            if value is None or value == "":
                entry[key] = "-"
    fields = list(json_data[0].keys())
    table_name = "SMTP-User"
    # The JSON field 'id' collides with the AUTO_INCREMENT primary key,
    # so it is stored in a `User-ID` column instead.
    column_names = ['User-ID' if field == 'id' else field for field in fields]
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (id INT AUTO_INCREMENT PRIMARY KEY,importdate BIGINT(11), "
    for column in column_names:
        create_table_query += f"`{column}` TEXT, "
    create_table_query = create_table_query.rstrip(", ") + ")"
    mysql_connect.create_database(create_table_query, name)
    print(len(fields))
    # BUG FIX: the INSERT previously used the raw JSON keys (including
    # 'id') without backticks, which did not match the created columns.
    # Quote every column and apply the same id -> User-ID rename.
    columns = ", ".join(f"`{column}`" for column in column_names)
    values_placeholder = ", ".join(["%s"] * len(fields))
    unix_time = int(datetime.now().timestamp())
    print(unix_time)
    insert_query = f"INSERT INTO `{table_name}` (importdate,{columns}) VALUES (%s,{values_placeholder})"
    # Insert one row per mailbox; trim strings, keep ints as-is.
    for entry in json_data:
        values = tuple(
            cell if isinstance(cell, int) else cell.strip() if cell.strip() else "-" for cell in entry.values())
        mysql_connect.add_user(insert_query, name, (unix_time,) + values)

32
Controller/system_info.py Normal file
View File

@ -0,0 +1,32 @@
import psutil
import platform
import math
import cpuinfo
from Controller import mysql_connect
def get_cpu_info():
    """Return the human-readable CPU model string of this machine."""
    info = cpuinfo.get_cpu_info()
    return info['brand_raw']
def get_cpu_sql():
    """Look up this machine's CPU model in the central `CPU-Liste` table.

    Returns the matching row (first column is `Prozessor-Anzahl`, the
    core count), or None when the model is unknown.
    """
    # Query the hardware once; the old code called cpuinfo twice.
    brand = cpuinfo.get_cpu_info()['brand_raw']
    # Parameterized instead of f-string interpolation: CPU model strings
    # can contain quotes (e.g. 3.20GHz variants with odd vendor text).
    query = "SELECT `Prozessor-Anzahl` FROM `CPU-Liste` WHERE `CPU-Name`=%s"
    return mysql_connect.get_cpu(query, "Stines-GmbH", (brand,))
def get_ram_info():
    """Total physical RAM in whole gigabytes (rounded up)."""
    total_bytes = psutil.virtual_memory().total
    gigabytes = total_bytes / (1024 ** 3)
    return math.ceil(gigabytes)
def get_hdd_info():
    """Summed size of all partitions except the C: drive, in whole GB."""
    total_gb = 0
    for partition in psutil.disk_partitions():
        # Skip the Windows system drive.
        if partition.device == 'C:\\':
            continue
        usage = psutil.disk_usage(partition.mountpoint)
        total_gb += usage.total / (1024 ** 3)
    return math.ceil(total_gb)
# Informationen über die CPU
def set_system_info(name, ipadress):
    """Push this machine's RAM/core-count/CPU model into `Kunden-Server`.

    *name* is unused but kept for interface compatibility with the other
    export functions; the row is matched on *ipadress*.
    """
    # The old code ran get_cpu_sql() twice — each call opens an SSH
    # tunnel and a DB connection.  Run it once and reuse the result.
    cores = get_cpu_sql()[0]
    # NOTE(review): values are interpolated into the SQL text because the
    # shared database() helper does not forward parameters for UPDATEs.
    # ipadress comes from our own inventory, but this is still
    # injection-prone — consider parameterizing once the helper allows it.
    query = f"UPDATE `Kunden-Server` SET RAM={get_ram_info()}, Prozessor={cores}, CPU='{get_cpu_info()}' WHERE `IP-Adresse`='{ipadress}'"
    return mysql_connect.update_cpu(query, "Stines-GmbH", cores)

69
Controller/zammad.py Normal file
View File

@ -0,0 +1,69 @@
import psycopg2
from datetime import datetime, timedelta
from Controller import mysql_connect
def tickets(ip,name):
    """Sync Zammad tickets of the group matching *name* into `Tickets`.

    Reads the tickets (plus their SLA tag) straight from the Zammad
    PostgreSQL database, creates a MySQL `Tickets` table mirroring the
    Postgres column types, inserts unknown tickets and updates tickets
    changed within the last day.  *ip* is unused; the Postgres host is
    hard-coded below.

    NOTE(review): the Postgres connection is never closed, and *name* is
    interpolated into SQL unescaped — both worth fixing.
    """
    # Connection settings for the Zammad PostgreSQL database.
    db_host = "172.17.1.5"
    db_name = "zammad_production"
    db_user = "zammad"
    db_password = "zammad"
    table_name = "Tickets"
    # Connect to Postgres.
    connection = psycopg2.connect(
        host=db_host,
        database=db_name,
        user=db_user,
        password=db_password
    )
    # First query is only used to obtain the column headers below.
    cursor = connection.cursor()
    query = "SELECT tickets.*, tag_items.name AS SLA FROM tickets LEFT JOIN tags ON tickets.id = tags.o_id LEFT JOIN tag_items ON tags.tag_item_id = tag_items.id;"
    cursor.execute(query)
    # Column names of the joined result (tickets.* plus 'sla').
    headers = [desc[0] for desc in cursor.description]
    # Resolve the Zammad group id: customer names use ae/ue/oe and '-'
    # while the Zammad group names use umlauts and spaces.
    group = f"SELECT id FROM groups WHERE name = '{name.replace('ae','ä').replace('ue','ü').replace('oe','ö').replace('-',' ')}'"
    cursor.execute(group)
    # Re-run the ticket query restricted to that group.
    query = f"SELECT tickets.*, tag_items.name AS SLA FROM tickets LEFT JOIN tags ON tickets.id = tags.o_id LEFT JOIN tag_items ON tags.tag_item_id = tag_items.id WHERE group_id = {cursor.fetchone()[0]}"
    cursor.execute(query)
    results = cursor.fetchall()
    # Fetch the Postgres column types to build a matching MySQL schema.
    column_type = f"SELECT column_name, data_type FROM information_schema.columns WHERE table_name = 'tickets' order by column_name ASC;"
    cursor.execute(column_type)
    column_type = cursor.fetchall()
    columns = ""
    # Build CREATE TABLE, translating Postgres types to MySQL ones.
    create_table_query = f"CREATE TABLE IF NOT EXISTS `{table_name}` (importdate BIGINT(11), "
    for i in column_type:
        columns += f"{i[0]},"
        if i[1] == "timestamp without time zone":
            create_table_query += f"`{i[0]}` DATETIME, "
        elif i[1] == "character varying":
            create_table_query += f"`{i[0]}` TEXT, "
        else:
            create_table_query += f"`{i[0]}` {i[1]}, "
    # NOTE(review): rstrip(",") does not strip the trailing ", " (it ends
    # in a space), so the remaining ", " happens to separate "sla TEXT".
    # It works, but only by accident — confirm before touching.
    create_table_query = create_table_query.rstrip(",") + "sla TEXT)"
    mysql_connect.create_database(create_table_query, name)
    # Upsert each ticket row.
    for row in results:
        unix_time = int(datetime.now().timestamp())
        # Shadows the builtin id(); holds the existing row (or None).
        id = mysql_connect.get_user(f"SELECT id FROM `{table_name}` where id = '%s'", name,(row[0],))
        # Column 36 is assumed to be the last-change timestamp — TODO
        # confirm against the tickets schema (ordering comes from
        # tickets.* above, not the alphabetical information_schema query).
        given_date = datetime.strptime(str(row[36]), "%Y-%m-%d %H:%M:%S.%f")
        yesterday = datetime.now() - timedelta(days=1)
        if id is None:
            # Unknown ticket: insert with the import timestamp.
            insert_query = f"INSERT INTO `{table_name}` (importdate, `{'`, `'.join(headers)}`) VALUES (%s, {', '.join(['%s'] * len(headers))})"
            mysql_connect.add_user(insert_query, name, (unix_time,) + row)
        else:
            # Known ticket: refresh it only when changed in the last day.
            if given_date > yesterday:
                update_query = f"UPDATE `{table_name}` SET "
                for field in headers:
                    update_query += f" `{field}` = %s, "
                update_query = update_query.rstrip(", ")
                update_query += f" WHERE id = {row[0]}"
                mysql_connect.add_user(update_query,name, row)

1
README.md Normal file
View File

@ -0,0 +1 @@
### Python Script für REPORTS

49
Start.py Normal file
View File

@ -0,0 +1,49 @@
import requests
import subprocess
import os
import shutil
import random
def download_and_run_file(url, filename):
    """Download *url* to *filename* and execute it.

    Aborts without executing when the download does not return HTTP 200.
    NOTE(review): this executes a binary fetched over the network — keep
    the URL HTTPS and the host trusted.
    """
    response = requests.get(url)
    if response.status_code == 200:
        with open(filename, 'wb') as f:
            f.write(response.content)
        # BUG FIX: the status messages were missing the {filename}
        # placeholder and printed literal text instead of the file name.
        print(f"File '{filename}' downloaded successfully.")
    else:
        print(f"Failed to download the file from {url}. Status code: {response.status_code}")
        return
    try:
        subprocess.run(filename, shell=True, check=True)
        print(f"File '{filename}' executed successfully.")
    except subprocess.CalledProcessError as e:
        print(f"Failed to execute the file. Error: {e}")
def add_windows_defender_exception(path):
    """Exclude *path* from Windows Defender scanning via PowerShell."""
    # e.g. Add-MpPreference -ExclusionPath "C:\path\to\your\folder"
    command_line = f'powershell Add-MpPreference -ExclusionPath "{path}"'
    try:
        subprocess.run(command_line, shell=True, check=True)
    except subprocess.CalledProcessError as e:
        print(f"Failed to add Windows Defender exception. Error: {e}")
    else:
        print(f"Windows Defender exception added for path: {path}")
if __name__ == "__main__":
    # Prepare the install folder; only "already exists" is acceptable.
    # BUG FIX: narrowed from a bare except that hid real errors
    # (e.g. permission problems).
    try:
        os.mkdir("C:\\Scripte")
    except FileExistsError:
        print("Folder Exist")
    # Exclude the folder from Defender before dropping the binary there.
    path_to_exclude = "C:\\Scripte"
    add_windows_defender_exception(path_to_exclude)
    # Fetch and start the actual reporting binary.
    url_to_file = "https://gitlab.stines.de/sebastian.serfling/REPORTS/raw/branch/main/dist/main.exe"
    file_name = "C:\\Scripte\\Reports.exe"
    download_and_run_file(url_to_file, file_name)
    # Move this bootstrapper next to the binary (no-op on reruns).
    try:
        shutil.move("Start.exe", "C:\\Scripte\\Start.exe")
    except (OSError, shutil.Error):
        print("File was moved!")

BIN
dist/main.exe vendored Normal file

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 20 KiB

142
main.py Normal file
View File

@ -0,0 +1,142 @@
import socket
from Controller import adcontroller_export, exchange_export, rds_export, smtp_export, zammad, system_info
import Controller.mysql_connect as mysql_connect
import random
import subprocess
import socket
def get_local_ip():
    """Return the primary local IPv4 address, or None on failure.

    Uses a UDP "connect" (no packet is actually sent) so the OS picks
    the outbound interface for us.
    """
    s = None
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(("8.8.8.8", 80))
        local_ip = s.getsockname()[0]
        print(local_ip)
        return local_ip
    except Exception as e:
        print(f"Fehler beim Abrufen der lokalen IP-Adresse: {e}")
        return None
    finally:
        # BUG FIX: the socket leaked when connect() raised.
        if s is not None:
            s.close()
def system_info_get(ip, name):
    """Refresh RAM/CPU inventory data for the server at *ip*."""
    # set_system_info expects (name, ip) — note the swapped order.
    system_info.set_system_info(name, ip)
def adcontroller(ip, name):
    """Dispatcher hook: run the Active Directory export for this server."""
    adcontroller_export.adcontroller(ip, name)
def exchange(ip, name):
    """Dispatcher hook: run the Exchange mailbox export for this server."""
    exchange_export.exchange(ip, name)
def smtp(ip, name, token):
    """Dispatcher hook: import mailcow mailboxes (needs an API token)."""
    smtp_export.smtp(ip, name, token)
def rds(ip, name):
    """Dispatcher hook: snapshot the active RDS sessions of this server."""
    rds_export.rds(ip, name)
def cms(ip, servername):
    """Placeholder: CMS user import (raw) is not implemented yet."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def mstore(ip, servername):
    """Placeholder: mail-store user import (last check, raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def cloud(ip, servername):
    """Placeholder: cloud user/space import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def vpn(ip, servername):
    """Placeholder: VPN online-time import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def nginx(ip, servername):
    """Placeholder: Let's Encrypt expiry import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def tickets(ip, name):
    """Dispatcher hook: sync Zammad tickets for this customer."""
    zammad.tickets(ip, name)
def data(ip, servername):
    """Placeholder: disk-space-used import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def omada(ip, servername):
    """Placeholder: Omada access import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def matomo(ip, servername):
    """Placeholder: Matomo statistics import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def gucamole(ip, servername):
    """Placeholder: Guacamole user/machine import (raw) — not implemented."""
    # Echo the target so the run log shows the server was visited.
    message = "" + ip + servername
    print(message)
def create_windows_task(task_name, command, schedule):
    """Register *command* as a Windows scheduled task running as SYSTEM.

    Errors from schtasks are printed, not raised.
    """
    command_line = (
        f'schtasks /F /create /ru "SYSTEM" /tn "{task_name}" '
        f'/tr "{command}" /sc {schedule}'
    )
    try:
        subprocess.run(command_line, shell=True, check=True)
        print(f"Windows task '{task_name}' created successfully.")
    except subprocess.CalledProcessError as e:
        print(f"Failed to create Windows task. Error: {e}")
# Main dispatch loop: look up this host in the central inventory, make sure
# a customer schema exists, run the export functions listed for it, refresh
# hardware info, and register the daily scheduled task.
# query = "SELECT * FROM `Stines-GmbH`.`Kunden-Server` WHERE `IP-Adresse` = ""'{}'""".format(get_local_ip())
# NOTE(review): the real lookup by get_local_ip() is commented out and a
# test IP is hard-coded — this must be reverted before deployment.
query = "SELECT * FROM `Stines-GmbH`.`Kunden-Server` WHERE `IP-Adresse` = ""'{}'""".format("172.19.1.6")
print("Before Get List")
# NOTE(review): the query runs twice (once just for the print) — each call
# opens its own SSH tunnel and DB connection.
print(mysql_connect.get_ip(query))
# NOTE(review): shadows the builtin list().
list = mysql_connect.get_ip(query)
print("Atfer Get IP")
# Unknown host: register it with its hardware data, then there is nothing
# to iterate below this run.
if list == []:
    query_insert = f"INSERT INTO `Kunden-Server` (Name,`Server-Name`,`IP-Adresse`,Funktion,CPU,RAM,Speicher) VALUES ('unkown','{socket.gethostname()}','{get_local_ip()}','-','{system_info.get_cpu_info()}','{system_info.get_ram_info()}','{system_info.get_hdd_info()}')"
    print("List is Empty")
    mysql_connect.add_user(query_insert,"Stines-GmbH","")
# Holds the IP of the last processed row; stays [] when no row matched.
set_ipaddress = []
print("Before Row")
# Process every matching inventory row.
for row in list:
    # Column layout of `Kunden-Server` as used here:
    # 2=name, 4=IP, 6=last change, 7=function name, 8=API token.
    name = row[2]
    ipadress = row[4]
    set_ipaddress = row[4]
    lastchange = row[6]
    funktion = row[7]
    token = row[8]
    print(name)
    cursor = mysql_connect.get_database()
    # Check whether the customer schema already exists.
    database_exists = False
    for (db_name,) in cursor:
        if name in str(db_name):
            database_exists = True
        # NOTE(review): any schema containing "-" marks the database as
        # existing, regardless of *name* — this looks wrong; verify.
        if "-" in str(db_name):
            database_exists = True
    # Create the schema when it is missing.
    if not database_exists:
        create_database_query = f"CREATE DATABASE `{name}`"
        mysql_connect.create_database(create_database_query,name)
        print(f"Die Datenbank '`{name}`' wurde erfolgreich erstellt.")
    else:
        print(f"Die Datenbank '`{name}`' existiert bereits.")
    # Dispatch to the export function named in the DB row.
    # NOTE(review): eval() on strings coming from the database is code
    # injection by design — replace with a dict of allowed callables.
    if token is not None:
        eval(funktion + '("' + ipadress + '","' + name + '","' + token + '")')
    else:
        if funktion == "-":
            # NOTE(review): break aborts ALL remaining servers; a
            # continue (skip this row only) was probably intended.
            break
        else:
            eval(funktion + '("' + ipadress + '","' + name + '")')
print(set_ipaddress)
# NOTE(review): when the loop never ran, this passes the string "[]".
system_info_get(f"{set_ipaddress}","Stines-GmbH")
# Re-register the daily task at a random minute in the 23:00 hour.
task_name = "Reports"
command_to_execute = "C:\\Scripte\\Start.exe"
schedule = f"daily /st 23:{random.randint(0, 59)}"  # You can customize the schedule here
create_windows_task(task_name, command_to_execute, schedule)

BIN
packages.txt Normal file

Binary file not shown.

34
setup-info.sh Normal file
View File

@ -0,0 +1,34 @@
#!/bin/bash
## Provision a Linux host for the REPORTS script and run it once.
## Create the working directory (idempotent) and switch into it.
mkdir -p /root/REPORTS
cd /root/REPORTS || exit 1
## OS-level dependencies
apt update
apt install python3-pip git -y
## Register the nightly crontab entry exactly once (.crontab is the marker).
if [ -f ".crontab" ]; then
    echo "Gibt es"
    # BUG FIX: the old script ran the nonexistent command `next` here
    # (bash has no `next`); nothing needs to happen in this branch.
else
    crontab -l | { cat; echo "30 0 * * * /root/REPORTS/setup-info.sh"; } | crontab -
    echo "Gibt es nicht"
    touch ".crontab"
fi
## Fetch / update the repository.  The remote label is (mis)spelled
## "orgin" — kept as-is so already-provisioned hosts keep working.
git init
git remote add orgin https://gitlab.stines.de/sebastian.serfling/REPORTS.git
git fetch
git pull orgin main
## Set up the Python virtualenv and run the report once.
python3 -m pip install virtualenv
python3 -m virtualenv venv
source venv/bin/activate
python3 -m pip install -r packages.txt
python3 -m pip uninstall mysql-connector -y ## Fix for Connection Issue
python3 -m pip install mysql-connector ## Fix for Connection Issue
python3 main.py
deactivate