From 80a3f640c444f13fb4171d03bf026f778fd6013d Mon Sep 17 00:00:00 2001
From: LittleJake <465917717@qq.com>
Date: Sat, 15 Aug 2020 00:02:27 +0800
Subject: [PATCH 01/62] =?UTF-8?q?=E6=9B=B4=E6=96=B0linux=E8=84=9A=E6=9C=AC?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
linux/report.py | 165 ++++++++++++++++++++++++++++++++++++++++++++++++
1 file changed, 165 insertions(+)
create mode 100644 linux/report.py
diff --git a/linux/report.py b/linux/report.py
new file mode 100644
index 0000000..df936b3
--- /dev/null
+++ b/linux/report.py
@@ -0,0 +1,165 @@
+import redis, subprocess, time, json, hashlib, re, math
+
+HOST = ""
+PORT = ""
+PASSWORD = ""
+SALT = ""
+IP = "0.0.0.0"
+conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
+TIME = math.floor(time.time())
+
+
+def get_process_num():
+ # n-2 '''
+ p = subprocess.Popen("ps aux | wc -l",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+
+def get_cpu_info():
+ # core modelname mhz'''
+ p = subprocess.Popen("cat /proc/cpuinfo | grep name | cut -f2 -d: |uniq",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+
+def get_cpu_core():
+ # core modelname mhz'''
+ p = subprocess.Popen("cat /proc/cpuinfo |grep cores|wc -l",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+
+def get_cpu_frequency():
+ # core modelname mhz'''
+ p = subprocess.Popen("cat /proc/cpuinfo |grep MHz|cut -f2 -d: |uniq",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+
+def get_mem_info():
+ global IP, TIME
+ # Mem/Swap'''
+ p = subprocess.Popen("free -m|grep -E '(Mem|Swap)'|awk -F' ' '{print $2,$3,$4}'",shell=True,stdout=subprocess.PIPE)
+ info = {}
+ # Mem
+ tmp = p.stdout.readline().strip().split()
+ info['Mem'] = {
+ 'total': tmp[0],
+ 'used': tmp[1],
+ 'free': tmp[2]
+ }
+
+ conn.zadd("system_monitor:collection:memory:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ # Swap
+ tmp = p.stdout.readline().strip().split()
+ info['Swap'] = {
+ 'total': tmp[0],
+ 'used': tmp[1],
+ 'free': tmp[2]
+ }
+ conn.zadd("system_monitor:collection:swap:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ return info
+
+
+def get_sys_version():
+ # System and version'''
+ p = subprocess.Popen("cat /etc/redhat-release",shell=True,stdout=subprocess.PIPE)
+
+ return p.stdout.readline().strip()
+
+
+def get_disk_info():
+ global IP, TIME
+ # disk: total, usage, free, %'''
+ p = subprocess.Popen("df -h --total /|grep total|awk -F' ' '{print $2,$3,$4,$5}'",shell=True,stdout=subprocess.PIPE)
+ tmp = p.stdout.readline().strip().split()
+ info = {
+ 'total': tmp[0],
+ 'used': tmp[1],
+ 'free': tmp[2],
+ 'percent': tmp[3]
+ }
+ conn.zadd("system_monitor:collection:disk:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ return info
+
+
+def get_ipv4():
+ # interface ipv4'''
+ p = subprocess.Popen("ip addr show scope global|grep inet\ |awk -F' ' '{print $2}'",shell=True,stdout=subprocess.PIPE)
+ return stdout_trim(p.stdout.readlines())
+
+
+def get_ipv6():
+ # interface ipv6
+ p = subprocess.Popen("ip addr show scope global|grep inet6|awk -F' ' '{print $2}'",shell=True,stdout=subprocess.PIPE)
+ return stdout_trim(p.stdout.readlines())
+
+
+def get_connections():
+ # establish
+ p = subprocess.Popen("netstat -na|grep ESTABLISHED|wc -l",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+
+def get_uptime():
+ # uptime second
+ p = subprocess.Popen("cat /proc/uptime|awk -F' ' '{print $1}'",shell=True,stdout=subprocess.PIPE)
+ return p.stdout.readline().strip()
+
+def get_cpu_load():
+ global IP, TIME
+ # uptime second
+ p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'",shell=True,stdout=subprocess.PIPE)
+ tmp = p.stdout.readline().strip().replace(' ','').split(",")
+ load = {
+ '1min': tmp[0],
+ '5min': tmp[1],
+ '15min': tmp[2]
+ }
+ conn.zadd("system_monitor:collection:cpu:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ return load
+
+def stdout_trim(so):
+ for i in range(0, len(so)):
+ so[i] = so[i].strip()
+ return so
+
+def get_aggragate_json():
+ info = {
+ 'Connection': get_connections(),
+ 'Disk': get_disk_info(),
+ 'Uptime': get_uptime(),
+ 'Memory': get_mem_info(),
+ 'Load': get_cpu_load(),
+ 'Process': get_process_num()
+ }
+ return json.dumps(info)
+
+
+def report():
+ conn.set("system_monitor:stat:"+IP, get_aggragate_json())
+
+
+def report_once():
+ '''ip'''
+ global IP, TIME
+ ip = get_ipv4()
+ if len(ip) > 0:
+ IP = ip[0]
+
+ info = {
+ "CPU":get_cpu_core()+"x "+get_cpu_info()+" @"+get_cpu_frequency()+"MHz",
+ "System Version": get_sys_version(),
+ "IPV4": re.sub("\.[0-9]*",".*", ",".join(get_ipv4())),
+ "IPV6": re.sub(":[a-zA-Z0-9]*",":*", ",".join(get_ipv6())),
+ "Update Time": TIME
+ }
+
+ conn.sadd("system_monitor:nodes",IP)
+ conn.hmset("system_monitor:hashes",{hashlib.sha256(IP+SALT).hexdigest(): IP})
+ conn.hmset("system_monitor:info:"+IP,info)
+ conn.zremrangebyscore("system_monitor:collection:cpu:"+IP,0,TIME-86400)
+ conn.zremrangebyscore("system_monitor:collection:disk:"+IP,0,TIME-86400)
+ conn.zremrangebyscore("system_monitor:collection:memory:"+IP,0,TIME-86400)
+ conn.zremrangebyscore("system_monitor:collection:swap:"+IP,0,TIME-86400)
+ report()
+
+
+report_once()
From 2c91a0745a59e220db5da5744a56c4bd94d70f53 Mon Sep 17 00:00:00 2001
From: LittleJake <465917717@qq.com>
Date: Mon, 1 Mar 2021 16:02:29 +0800
Subject: [PATCH 02/62] Refactor metric collection to use psutil.
---
linux/report.py | 235 ++++++++++++++++++++++++++++++++++--------------
1 file changed, 170 insertions(+), 65 deletions(-)
diff --git a/linux/report.py b/linux/report.py
index df936b3..225472e 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -1,66 +1,104 @@
-import redis, subprocess, time, json, hashlib, re, math
+import hashlib
+import json
+import math
+import re
+import redis
+import subprocess
+import time
+import requests
+import psutil
-HOST = ""
-PORT = ""
-PASSWORD = ""
-SALT = ""
+HOST = "redis-host"
+PORT = "redis-port"
+PASSWORD = "redis-password"
+SALT = "redis-salt"
IP = "0.0.0.0"
conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
TIME = math.floor(time.time())
+TIMEOUT = 259200
+
+
+def get_network():
+ network = {'RX': {
+ 'bytes': psutil.net_io_counters().bytes_recv,
+ 'packets': psutil.net_io_counters().packets_recv,
+ }, 'TX': {
+ 'bytes': psutil.net_io_counters().bytes_sent,
+ 'packets': psutil.net_io_counters().packets_sent,
+ }}
+
+ if conn.exists("system_monitor:collection:network:tmp:" + IP):
+ net0 = json.loads(conn.get("system_monitor:collection:network:tmp:" + IP))
+ if network['RX']['packets'] > net0['RX']['packets'] and network['RX']['bytes'] > net0['RX']['bytes'] and \
+ network['TX']['packets'] > net0['TX']['packets'] and network['TX']['bytes'] > net0['TX']['bytes']:
+ conn.zadd("system_monitor:collection:network:RX:" + IP,
+ {json.dumps(
+ {"time": TIME, "value": '{},{}'.format(network['RX']['packets'] - net0['RX']['packets'],
+ network['RX']['bytes'] - net0['RX']['bytes'])}): TIME})
+ conn.zadd("system_monitor:collection:network:TX:" + IP,
+ {json.dumps(
+ {"time": TIME, "value": '{},{}'.format(network['TX']['packets'] - net0['TX']['packets'],
+ network['TX']['bytes'] - net0['TX']['bytes'])}): TIME})
+
+ conn.set("system_monitor:collection:network:tmp:" + IP,
+ json.dumps(network), 86400)
+
+ return network
def get_process_num():
# n-2 '''
- p = subprocess.Popen("ps aux | wc -l",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen("ps aux | wc -l", shell=True, stdout=subprocess.PIPE)
return p.stdout.readline().strip()
def get_cpu_info():
# core modelname mhz'''
- p = subprocess.Popen("cat /proc/cpuinfo | grep name | cut -f2 -d: |uniq",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen("cat /proc/cpuinfo | egrep 'Processor|name' | cut -f2 -d: |uniq", shell=True,
+ stdout=subprocess.PIPE)
return p.stdout.readline().strip()
def get_cpu_core():
# core modelname mhz'''
- p = subprocess.Popen("cat /proc/cpuinfo |grep cores|wc -l",shell=True,stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ return str(psutil.cpu_count())
-def get_cpu_frequency():
+def get_cpu_temp():
# core modelname mhz'''
- p = subprocess.Popen("cat /proc/cpuinfo |grep MHz|cut -f2 -d: |uniq",shell=True,stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ p = subprocess.Popen("cat /sys/devices/virtual/thermal/thermal_zone0/temp", shell=True, stdout=subprocess.PIPE)
+ try:
+ tmp = p.stdout.readline().strip()
+ conn.zadd("system_monitor:collection:thermal:" + IP,
+ {json.dumps({"time": TIME, "value": str(int(tmp) * 1.0 / 1000)}): TIME})
+ return tmp
+ except:
+ return 0
def get_mem_info():
global IP, TIME
- # Mem/Swap'''
- p = subprocess.Popen("free -m|grep -E '(Mem|Swap)'|awk -F' ' '{print $2,$3,$4}'",shell=True,stdout=subprocess.PIPE)
- info = {}
- # Mem
- tmp = p.stdout.readline().strip().split()
- info['Mem'] = {
- 'total': tmp[0],
- 'used': tmp[1],
- 'free': tmp[2]
- }
-
- conn.zadd("system_monitor:collection:memory:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
- # Swap
- tmp = p.stdout.readline().strip().split()
- info['Swap'] = {
- 'total': tmp[0],
- 'used': tmp[1],
- 'free': tmp[2]
- }
- conn.zadd("system_monitor:collection:swap:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ info = {'Mem': {
+ 'total': '%.2f' % (psutil.virtual_memory().total / 1024 / 1024),
+ 'used': '%.2f' % (psutil.virtual_memory().used / 1024 / 1024),
+ 'free': '%.2f' % (psutil.virtual_memory().free / 1024 / 1024),
+ }, 'Swap': {
+ 'total': '%.2f' % (psutil.swap_memory().total / 1024 / 1024),
+ 'used': '%.2f' % (psutil.swap_memory().used / 1024 / 1024),
+ 'free': '%.2f' % (psutil.swap_memory().free / 1024 / 1024)
+ }}
+
+ conn.zadd("system_monitor:collection:memory:" + IP,
+ {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.virtual_memory().used / 1024 / 1024))}): TIME})
+
+ conn.zadd("system_monitor:collection:swap:" + IP,
+ {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.swap_memory().used / 1024 / 1024))}): TIME})
return info
def get_sys_version():
# System and version'''
- p = subprocess.Popen("cat /etc/redhat-release",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen(". /usr/lib/os-release;echo $PRETTY_NAME", shell=True, stdout=subprocess.PIPE)
return p.stdout.readline().strip()
@@ -68,7 +106,8 @@ def get_sys_version():
def get_disk_info():
global IP, TIME
# disk: total, usage, free, %'''
- p = subprocess.Popen("df -h --total /|grep total|awk -F' ' '{print $2,$3,$4,$5}'",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen("df -h --total /|grep total|awk -F' ' '{print $2,$3,$4,$5}'", shell=True,
+ stdout=subprocess.PIPE)
tmp = p.stdout.readline().strip().split()
info = {
'total': tmp[0],
@@ -76,51 +115,72 @@ def get_disk_info():
'free': tmp[2],
'percent': tmp[3]
}
- conn.zadd("system_monitor:collection:disk:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ conn.zadd("system_monitor:collection:disk:" + IP,
+ {json.dumps({"time": TIME, "value": str(tmp[1])}): TIME})
return info
def get_ipv4():
# interface ipv4'''
- p = subprocess.Popen("ip addr show scope global|grep inet\ |awk -F' ' '{print $2}'",shell=True,stdout=subprocess.PIPE)
- return stdout_trim(p.stdout.readlines())
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url="http://v4.ipv6-test.com/api/myip.php", timeout=5)
+ if resp.status_code == 200:
+ return resp.text
+ except:
+ i = i - 1
+
+ return "None"
def get_ipv6():
# interface ipv6
- p = subprocess.Popen("ip addr show scope global|grep inet6|awk -F' ' '{print $2}'",shell=True,stdout=subprocess.PIPE)
- return stdout_trim(p.stdout.readlines())
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url="http://v6.ipv6-test.com/api/myip.php", timeout=5)
+ if resp.status_code == 200:
+ return resp.text
+ except:
+ i = i - 1
+
+ return "None"
def get_connections():
# establish
- p = subprocess.Popen("netstat -na|grep ESTABLISHED|wc -l",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen("netstat -na|grep ESTABLISHED|wc -l", shell=True, stdout=subprocess.PIPE)
return p.stdout.readline().strip()
def get_uptime():
# uptime second
- p = subprocess.Popen("cat /proc/uptime|awk -F' ' '{print $1}'",shell=True,stdout=subprocess.PIPE)
+ p = subprocess.Popen("cat /proc/uptime|awk -F' ' '{print $1}'", shell=True, stdout=subprocess.PIPE)
return p.stdout.readline().strip()
+
def get_cpu_load():
global IP, TIME
# uptime second
- p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'",shell=True,stdout=subprocess.PIPE)
- tmp = p.stdout.readline().strip().replace(' ','').split(",")
+ p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'", shell=True, stdout=subprocess.PIPE)
+ tmp = p.stdout.readline().strip().replace(' ', '').split(",")
load = {
'1min': tmp[0],
'5min': tmp[1],
'15min': tmp[2]
}
- conn.zadd("system_monitor:collection:cpu:"+IP,{json.dumps({"time": TIME, "value": str(tmp[1])}):TIME})
+ conn.zadd("system_monitor:collection:cpu:" + IP,
+ {json.dumps({"time": TIME, "value": str(psutil.cpu_percent())}): TIME})
return load
+
def stdout_trim(so):
for i in range(0, len(so)):
so[i] = so[i].strip()
return so
-
+
+
def get_aggragate_json():
info = {
'Connection': get_connections(),
@@ -128,38 +188,83 @@ def get_aggragate_json():
'Uptime': get_uptime(),
'Memory': get_mem_info(),
'Load': get_cpu_load(),
- 'Process': get_process_num()
+ 'Process': get_process_num(),
+ 'Network': get_network(),
+ 'Thermal': get_cpu_temp()
}
return json.dumps(info)
def report():
- conn.set("system_monitor:stat:"+IP, get_aggragate_json())
-
+ conn.set("system_monitor:stat:" + IP, get_aggragate_json())
+
+
+def delete_timeout():
+ with conn.pipeline(transaction=False) as p:
+ for k, v in conn.hgetall("system_monitor:hashes").items():
+ hashes = bytes.decode(k)
+ ip = bytes.decode(v)
+ if TIME - float(conn.hmget("system_monitor:info:" + ip, 'Update Time')[0]) > TIMEOUT:
+ p.srem("system_monitor:nodes", ip)
+ p.hdel("system_monitor:hashes", hashes)
+ p.delete("system_monitor:info:" + ip)
+ p.zremrangebyscore("system_monitor:collection:cpu:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:disk:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:memory:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:swap:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:RX:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:TX:" + ip, 0, TIME)
+ p.delete("system_monitor:stat:" + ip)
+ p.execute()
+
def report_once():
- '''ip'''
+ """ip"""
global IP, TIME
- ip = get_ipv4()
- if len(ip) > 0:
- IP = ip[0]
-
+ IP = get_ipv4()
+
info = {
- "CPU":get_cpu_core()+"x "+get_cpu_info()+" @"+get_cpu_frequency()+"MHz",
+ "CPU": get_cpu_core() + "x " + get_cpu_info(),
"System Version": get_sys_version(),
- "IPV4": re.sub("\.[0-9]*",".*", ",".join(get_ipv4())),
- "IPV6": re.sub(":[a-zA-Z0-9]*",":*", ",".join(get_ipv6())),
+ "IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
+ "IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
"Update Time": TIME
}
-
- conn.sadd("system_monitor:nodes",IP)
- conn.hmset("system_monitor:hashes",{hashlib.sha256(IP+SALT).hexdigest(): IP})
- conn.hmset("system_monitor:info:"+IP,info)
- conn.zremrangebyscore("system_monitor:collection:cpu:"+IP,0,TIME-86400)
- conn.zremrangebyscore("system_monitor:collection:disk:"+IP,0,TIME-86400)
- conn.zremrangebyscore("system_monitor:collection:memory:"+IP,0,TIME-86400)
- conn.zremrangebyscore("system_monitor:collection:swap:"+IP,0,TIME-86400)
+ with conn.pipeline(transaction=False) as pipeline:
+ pipeline.sadd("system_monitor:nodes", IP)
+ pipeline.hmset("system_monitor:hashes", {hashlib.sha256(IP + SALT).hexdigest(): IP})
+ pipeline.hmset("system_monitor:info:" + IP, info)
+ pipeline.zremrangebyscore("system_monitor:collection:cpu:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:thermal:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:disk:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:memory:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:swap:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + IP, 0, TIME - 86400)
+ pipeline.execute()
+
report()
+ delete_timeout()
-report_once()
+'''
+print("running processes")
+print(get_process_num())
+print("\ncpu info")
+print(get_cpu_info())
+print(get_cpu_core())
+print(get_cpu_temp())
+print("\nip")
+print(get_ipv4())
+print(get_ipv6())
+print("\nconnection")
+print(get_connections())
+print("\nuptime")
+print(get_uptime())
+print("\nsystem version")
+print(get_sys_version())
+print("\nMem & Disk")
+print(get_mem_info())
+print(get_disk_info())
+'''
+report_once()
\ No newline at end of file
From 55c4640acade3128ff92f36cb14dcbeb65675acb Mon Sep 17 00:00:00 2001
From: LittleJake <465917717@qq.com>
Date: Thu, 8 Apr 2021 17:29:15 +0800
Subject: [PATCH 03/62] 1. add .gitignore 2. fix disk usage reporting for
 volumes smaller than 1GB
---
.gitignore | 1 +
linux/report.py | 14 ++++++--------
2 files changed, 7 insertions(+), 8 deletions(-)
create mode 100644 .gitignore
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..723ef36
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+.idea
\ No newline at end of file
diff --git a/linux/report.py b/linux/report.py
index 225472e..0c53b20 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -106,17 +106,15 @@ def get_sys_version():
def get_disk_info():
global IP, TIME
# disk: total, usage, free, %'''
- p = subprocess.Popen("df -h --total /|grep total|awk -F' ' '{print $2,$3,$4,$5}'", shell=True,
- stdout=subprocess.PIPE)
- tmp = p.stdout.readline().strip().split()
+ disk = psutil.disk_usage('/')
info = {
- 'total': tmp[0],
- 'used': tmp[1],
- 'free': tmp[2],
- 'percent': tmp[3]
+ 'total': '%.2f' % (disk.total/1073741824),
+ 'used': '%.2f' % (disk.used/1073741824),
+ 'free': '%.2f' % (disk.free/1073741824),
+ 'percent': disk.percent
}
conn.zadd("system_monitor:collection:disk:" + IP,
- {json.dumps({"time": TIME, "value": str(tmp[1])}): TIME})
+ {json.dumps({"time": TIME, "value": str('%.2f' % (disk.used/1073741824))}): TIME})
return info
From 907dbe33bcbace4c1dab01745ab0804d1a0eb0ab Mon Sep 17 00:00:00 2001
From: LittleJake <465917717@qq.com>
Date: Wed, 14 Apr 2021 16:11:56 +0800
Subject: [PATCH 04/62] update disk partition support.
---
linux/report.py | 51 ++++++++++++++++++++++++++++++++++++-------------
1 file changed, 38 insertions(+), 13 deletions(-)
diff --git a/linux/report.py b/linux/report.py
index 0c53b20..add06ec 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -17,6 +17,8 @@
TIME = math.floor(time.time())
TIMEOUT = 259200
+DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc']
+
def get_network():
network = {'RX': {
@@ -79,20 +81,20 @@ def get_cpu_temp():
def get_mem_info():
global IP, TIME
info = {'Mem': {
- 'total': '%.2f' % (psutil.virtual_memory().total / 1024 / 1024),
- 'used': '%.2f' % (psutil.virtual_memory().used / 1024 / 1024),
- 'free': '%.2f' % (psutil.virtual_memory().free / 1024 / 1024),
+ 'total': '%.2f' % (psutil.virtual_memory().total*1.0/1048576),
+ 'used': '%.2f' % (psutil.virtual_memory().used*1.0/1048576),
+ 'free': '%.2f' % (psutil.virtual_memory().free*1.0/1048576),
}, 'Swap': {
- 'total': '%.2f' % (psutil.swap_memory().total / 1024 / 1024),
- 'used': '%.2f' % (psutil.swap_memory().used / 1024 / 1024),
- 'free': '%.2f' % (psutil.swap_memory().free / 1024 / 1024)
+ 'total': '%.2f' % (psutil.swap_memory().total*1.0/1048576),
+ 'used': '%.2f' % (psutil.swap_memory().used*1.0/1048576),
+ 'free': '%.2f' % (psutil.swap_memory().free*1.0/1048576)
}}
conn.zadd("system_monitor:collection:memory:" + IP,
- {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.virtual_memory().used / 1024 / 1024))}): TIME})
+ {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.virtual_memory().used / 1048576))}): TIME})
conn.zadd("system_monitor:collection:swap:" + IP,
- {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.swap_memory().used / 1024 / 1024))}): TIME})
+ {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.swap_memory().used / 1048576))}): TIME})
return info
@@ -103,19 +105,42 @@ def get_sys_version():
return p.stdout.readline().strip()
+def get_disk_partitions():
+ parts = psutil.disk_partitions(True)
+ result = []
+ for part in parts:
+ result.append(part)
+ for i in DISK_EXCLUDE:
+ if part.mountpoint.find(i) != -1:
+ result.remove(part)
+ break
+ return result
+
+
def get_disk_info():
global IP, TIME
# disk: total, usage, free, %'''
+ disks = {}
+
+ for partition in get_disk_partitions():
+ disk = psutil.disk_usage(partition.mountpoint)
+ disks[partition.mountpoint] = {
+ 'total': '%.2f' % (disk.total*1.0/1048576),
+ 'used': '%.2f' % (disk.used*1.0/1048576),
+ 'free': '%.2f' % (disk.free*1.0/1048576),
+ 'percent': disk.percent
+ }
+
disk = psutil.disk_usage('/')
info = {
- 'total': '%.2f' % (disk.total/1073741824),
- 'used': '%.2f' % (disk.used/1073741824),
- 'free': '%.2f' % (disk.free/1073741824),
+ 'total': '%.2f' % (disk.total*1.0/1048576),
+ 'used': '%.2f' % (disk.used*1.0/1048576),
+ 'free': '%.2f' % (disk.free*1.0/1048576),
'percent': disk.percent
}
conn.zadd("system_monitor:collection:disk:" + IP,
- {json.dumps({"time": TIME, "value": str('%.2f' % (disk.used/1073741824))}): TIME})
- return info
+ {json.dumps({"time": TIME, "value": disks}): TIME})
+ return disks
def get_ipv4():
From 1a5055d51b7c448984547b64b7094431608962f5 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 16 Sep 2021 17:29:47 +0800
Subject: [PATCH 05/62] Refactor helpers, add a test script, fix regex escaping
1. Support Python 3.8.
2. Reduce duplication by extracting the b2s, format_MB and fetch_url helpers.
3. Add a test script (Test_report.py) exercising the collection functions.
4. Fix regex escaping in the IPv4 masking pattern.
5. Wrap the body of delete_offline in try..except so the first run (when no
 hashes exist yet) does not raise.
---
linux/Test_report.py | 21 +++++
linux/report.py | 186 ++++++++++++++++++-------------------------
2 files changed, 99 insertions(+), 108 deletions(-)
create mode 100644 linux/Test_report.py
diff --git a/linux/Test_report.py b/linux/Test_report.py
new file mode 100644
index 0000000..ad5ec5b
--- /dev/null
+++ b/linux/Test_report.py
@@ -0,0 +1,21 @@
+from report import *
+
+print("running processes")
+print(get_process_num())
+print("\ncpu info")
+print(get_cpu_info())
+print(get_cpu_core())
+print(get_cpu_load())
+print(get_cpu_temp())
+print("\nip")
+print(get_ipv4())
+print(get_ipv6())
+print("\nconnection")
+print(get_connections())
+print("\nuptime")
+print(get_uptime())
+print("\nsystem version")
+print(get_sys_version())
+print("\nMem & Disk")
+print(get_mem_info())
+print(get_disk_info())
diff --git a/linux/report.py b/linux/report.py
index add06ec..d779721 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -16,8 +16,8 @@
conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
TIME = math.floor(time.time())
TIMEOUT = 259200
-
-DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc']
+TIME_PERIOD = 86400
+DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive']
def get_network():
@@ -43,22 +43,20 @@ def get_network():
network['TX']['bytes'] - net0['TX']['bytes'])}): TIME})
conn.set("system_monitor:collection:network:tmp:" + IP,
- json.dumps(network), 86400)
-
+ json.dumps(network), TIME_PERIOD)
return network
def get_process_num():
# n-2 '''
p = subprocess.Popen("ps aux | wc -l", shell=True, stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ return b2s(p.stdout.readline().strip())
def get_cpu_info():
# core modelname mhz'''
- p = subprocess.Popen("cat /proc/cpuinfo | egrep 'Processor|name' | cut -f2 -d: |uniq", shell=True,
- stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ p = subprocess.Popen("cat /proc/cpuinfo | egrep 'Processor|name' | cut -f2 -d: |uniq", shell=True, stdout=subprocess.PIPE)
+ return b2s(p.stdout.readline().strip())
def get_cpu_core():
@@ -70,9 +68,9 @@ def get_cpu_temp():
# core modelname mhz'''
p = subprocess.Popen("cat /sys/devices/virtual/thermal/thermal_zone0/temp", shell=True, stdout=subprocess.PIPE)
try:
- tmp = p.stdout.readline().strip()
+ tmp = b2s(p.stdout.readline().strip())
conn.zadd("system_monitor:collection:thermal:" + IP,
- {json.dumps({"time": TIME, "value": str(int(tmp) * 1.0 / 1000)}): TIME})
+ {json.dumps({"time": TIME, "value": str(int(tmp)*1.0/1000)}): TIME})
return tmp
except:
return 0
@@ -81,20 +79,20 @@ def get_cpu_temp():
def get_mem_info():
global IP, TIME
info = {'Mem': {
- 'total': '%.2f' % (psutil.virtual_memory().total*1.0/1048576),
- 'used': '%.2f' % (psutil.virtual_memory().used*1.0/1048576),
- 'free': '%.2f' % (psutil.virtual_memory().free*1.0/1048576),
+ 'total': format_MB(psutil.virtual_memory().total),
+ 'used': format_MB(psutil.virtual_memory().used),
+ 'free': format_MB(psutil.virtual_memory().free),
}, 'Swap': {
- 'total': '%.2f' % (psutil.swap_memory().total*1.0/1048576),
- 'used': '%.2f' % (psutil.swap_memory().used*1.0/1048576),
- 'free': '%.2f' % (psutil.swap_memory().free*1.0/1048576)
+ 'total': format_MB(psutil.swap_memory().total),
+ 'used': format_MB(psutil.swap_memory().used),
+ 'free': format_MB(psutil.swap_memory().free)
}}
conn.zadd("system_monitor:collection:memory:" + IP,
- {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.virtual_memory().used / 1048576))}): TIME})
+ {json.dumps({"time": TIME, "value": format_MB(psutil.virtual_memory().used)}): TIME})
conn.zadd("system_monitor:collection:swap:" + IP,
- {json.dumps({"time": TIME, "value": str('%.2f' % (psutil.swap_memory().used / 1048576))}): TIME})
+ {json.dumps({"time": TIME, "value": format_MB(psutil.swap_memory().used)}): TIME})
return info
@@ -102,7 +100,7 @@ def get_sys_version():
# System and version'''
p = subprocess.Popen(". /usr/lib/os-release;echo $PRETTY_NAME", shell=True, stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ return b2s(p.stdout.readline().strip())
def get_disk_partitions():
@@ -125,19 +123,12 @@ def get_disk_info():
for partition in get_disk_partitions():
disk = psutil.disk_usage(partition.mountpoint)
disks[partition.mountpoint] = {
- 'total': '%.2f' % (disk.total*1.0/1048576),
- 'used': '%.2f' % (disk.used*1.0/1048576),
- 'free': '%.2f' % (disk.free*1.0/1048576),
+ 'total': format_MB(disk.total),
+ 'used': format_MB(disk.used),
+ 'free': format_MB(disk.free),
'percent': disk.percent
}
- disk = psutil.disk_usage('/')
- info = {
- 'total': '%.2f' % (disk.total*1.0/1048576),
- 'used': '%.2f' % (disk.used*1.0/1048576),
- 'free': '%.2f' % (disk.free*1.0/1048576),
- 'percent': disk.percent
- }
conn.zadd("system_monitor:collection:disk:" + IP,
{json.dumps({"time": TIME, "value": disks}): TIME})
return disks
@@ -145,49 +136,30 @@ def get_disk_info():
def get_ipv4():
# interface ipv4'''
- i = 5
- while i > 0:
- try:
- resp = requests.get(url="http://v4.ipv6-test.com/api/myip.php", timeout=5)
- if resp.status_code == 200:
- return resp.text
- except:
- i = i - 1
-
- return "None"
+ return fetch_url("http://v4.ipv6-test.com/api/myip.php")
def get_ipv6():
# interface ipv6
- i = 5
- while i > 0:
- try:
- resp = requests.get(url="http://v6.ipv6-test.com/api/myip.php", timeout=5)
- if resp.status_code == 200:
- return resp.text
- except:
- i = i - 1
-
- return "None"
+ return fetch_url("http://v6.ipv6-test.com/api/myip.php")
def get_connections():
# establish
p = subprocess.Popen("netstat -na|grep ESTABLISHED|wc -l", shell=True, stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ return b2s(p.stdout.readline().strip())
def get_uptime():
# uptime second
- p = subprocess.Popen("cat /proc/uptime|awk -F' ' '{print $1}'", shell=True, stdout=subprocess.PIPE)
- return p.stdout.readline().strip()
+ return time.time() - psutil.boot_time()
def get_cpu_load():
global IP, TIME
# uptime second
p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'", shell=True, stdout=subprocess.PIPE)
- tmp = p.stdout.readline().strip().replace(' ', '').split(",")
+ tmp = b2s(p.stdout.readline().strip()).replace(' ', '').split(",")
load = {
'1min': tmp[0],
'5min': tmp[1],
@@ -198,12 +170,6 @@ def get_cpu_load():
return load
-def stdout_trim(so):
- for i in range(0, len(so)):
- so[i] = so[i].strip()
- return so
-
-
def get_aggragate_json():
info = {
'Connection': get_connections(),
@@ -218,27 +184,50 @@ def get_aggragate_json():
return json.dumps(info)
+def b2s(b):
+ return str(b, encoding="utf-8")
+
+
def report():
conn.set("system_monitor:stat:" + IP, get_aggragate_json())
-def delete_timeout():
+def delete_offline():
with conn.pipeline(transaction=False) as p:
- for k, v in conn.hgetall("system_monitor:hashes").items():
- hashes = bytes.decode(k)
- ip = bytes.decode(v)
- if TIME - float(conn.hmget("system_monitor:info:" + ip, 'Update Time')[0]) > TIMEOUT:
- p.srem("system_monitor:nodes", ip)
- p.hdel("system_monitor:hashes", hashes)
- p.delete("system_monitor:info:" + ip)
- p.zremrangebyscore("system_monitor:collection:cpu:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:disk:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:memory:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:swap:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:RX:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:TX:" + ip, 0, TIME)
- p.delete("system_monitor:stat:" + ip)
- p.execute()
+ try:
+ for k, v in conn.hgetall("system_monitor:hashes").items():
+ hashes = bytes.decode(k)
+ ip = bytes.decode(v)
+ if TIME - float(conn.hmget("system_monitor:info:" + ip, 'Update Time')[0]) > TIMEOUT:
+ p.srem("system_monitor:nodes", ip)
+ p.hdel("system_monitor:hashes", hashes)
+ p.delete("system_monitor:info:" + ip)
+ p.zremrangebyscore("system_monitor:collection:cpu:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:disk:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:memory:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:swap:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:RX:" + ip, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:TX:" + ip, 0, TIME)
+ p.delete("system_monitor:stat:" + ip)
+ p.execute()
+ except:
+ pass
+
+
+def format_MB(v):
+ return '%.2f' % (v * 1.0 / 1048576)
+
+
+def fetch_url(url):
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url=url, timeout=5)
+ if resp.status_code == 200:
+ return resp.text
+ except:
+ i = i - 1
+ return "None"
def report_once():
@@ -249,45 +238,26 @@ def report_once():
info = {
"CPU": get_cpu_core() + "x " + get_cpu_info(),
"System Version": get_sys_version(),
- "IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
+ "IPV4": re.sub("[0-9]*\\.[0-9]*\\.[0-9]*", "*.*.*", get_ipv4()),
"IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
"Update Time": TIME
}
with conn.pipeline(transaction=False) as pipeline:
pipeline.sadd("system_monitor:nodes", IP)
- pipeline.hmset("system_monitor:hashes", {hashlib.sha256(IP + SALT).hexdigest(): IP})
+ pipeline.hmset("system_monitor:hashes", {hashlib.sha256((IP + SALT).encode("utf-8")).hexdigest(): IP})
pipeline.hmset("system_monitor:info:" + IP, info)
- pipeline.zremrangebyscore("system_monitor:collection:cpu:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:thermal:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:disk:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:memory:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:swap:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + IP, 0, TIME - 86400)
- pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + IP, 0, TIME - 86400)
+ pipeline.zremrangebyscore("system_monitor:collection:cpu:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:thermal:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:disk:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:memory:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:swap:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + IP, 0, TIME - TIME_PERIOD)
pipeline.execute()
report()
- delete_timeout()
-
-
-'''
-print("running processes")
-print(get_process_num())
-print("\ncpu info")
-print(get_cpu_info())
-print(get_cpu_core())
-print(get_cpu_temp())
-print("\nip")
-print(get_ipv4())
-print(get_ipv6())
-print("\nconnection")
-print(get_connections())
-print("\nuptime")
-print(get_uptime())
-print("\nsystem version")
-print(get_sys_version())
-print("\nMem & Disk")
-print(get_mem_info())
-print(get_disk_info())
-'''
-report_once()
\ No newline at end of file
+ delete_offline()
+ conn.close()
+
+
+report_once()
From ebef363a59f50dbeaa155e47d88dd8f7dc322abb Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 17 Dec 2021 11:09:45 +0800
Subject: [PATCH 06/62] Update report.py
Update for new release 2.0-beta
---
linux/report.py | 79 ++++++++++++++++++++++++++-----------------------
1 file changed, 42 insertions(+), 37 deletions(-)
diff --git a/linux/report.py b/linux/report.py
index d779721..4dc1ef4 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -7,18 +7,27 @@
import time
import requests
import psutil
+import uuid
HOST = "redis-host"
PORT = "redis-port"
PASSWORD = "redis-password"
SALT = "redis-salt"
+UUID = str(uuid.uuid4()).replace("-", "")
IP = "0.0.0.0"
-conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
TIME = math.floor(time.time())
TIMEOUT = 259200
TIME_PERIOD = 86400
-DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive']
+DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive', '/var/lib/']
+
+conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
+if os.path.isfile('.uuid'):
+ with open('.uuid', 'r') as fp:
+ UUID = fp.read().strip()
+else:
+ with open('.uuid', 'w') as fp:
+ fp.write(UUID)
def get_network():
network = {'RX': {
@@ -29,20 +38,20 @@ def get_network():
'packets': psutil.net_io_counters().packets_sent,
}}
- if conn.exists("system_monitor:collection:network:tmp:" + IP):
- net0 = json.loads(conn.get("system_monitor:collection:network:tmp:" + IP))
+ if conn.exists("system_monitor:collection:network:tmp:" + UUID):
+ net0 = json.loads(conn.get("system_monitor:collection:network:tmp:" + UUID))
if network['RX']['packets'] > net0['RX']['packets'] and network['RX']['bytes'] > net0['RX']['bytes'] and \
network['TX']['packets'] > net0['TX']['packets'] and network['TX']['bytes'] > net0['TX']['bytes']:
- conn.zadd("system_monitor:collection:network:RX:" + IP,
+ conn.zadd("system_monitor:collection:network:RX:" + UUID,
{json.dumps(
{"time": TIME, "value": '{},{}'.format(network['RX']['packets'] - net0['RX']['packets'],
network['RX']['bytes'] - net0['RX']['bytes'])}): TIME})
- conn.zadd("system_monitor:collection:network:TX:" + IP,
+ conn.zadd("system_monitor:collection:network:TX:" + UUID,
{json.dumps(
{"time": TIME, "value": '{},{}'.format(network['TX']['packets'] - net0['TX']['packets'],
network['TX']['bytes'] - net0['TX']['bytes'])}): TIME})
- conn.set("system_monitor:collection:network:tmp:" + IP,
+ conn.set("system_monitor:collection:network:tmp:" + UUID,
json.dumps(network), TIME_PERIOD)
return network
@@ -50,7 +59,8 @@ def get_network():
def get_process_num():
# n-2 '''
p = subprocess.Popen("ps aux | wc -l", shell=True, stdout=subprocess.PIPE)
- return b2s(p.stdout.readline().strip())
+ return
+(p.stdout.readline().strip())
def get_cpu_info():
@@ -69,7 +79,7 @@ def get_cpu_temp():
p = subprocess.Popen("cat /sys/devices/virtual/thermal/thermal_zone0/temp", shell=True, stdout=subprocess.PIPE)
try:
tmp = b2s(p.stdout.readline().strip())
- conn.zadd("system_monitor:collection:thermal:" + IP,
+ conn.zadd("system_monitor:collection:thermal:" + UUID,
{json.dumps({"time": TIME, "value": str(int(tmp)*1.0/1000)}): TIME})
return tmp
except:
@@ -77,7 +87,6 @@ def get_cpu_temp():
def get_mem_info():
- global IP, TIME
info = {'Mem': {
'total': format_MB(psutil.virtual_memory().total),
'used': format_MB(psutil.virtual_memory().used),
@@ -88,10 +97,10 @@ def get_mem_info():
'free': format_MB(psutil.swap_memory().free)
}}
- conn.zadd("system_monitor:collection:memory:" + IP,
+ conn.zadd("system_monitor:collection:memory:" + UUID,
{json.dumps({"time": TIME, "value": format_MB(psutil.virtual_memory().used)}): TIME})
- conn.zadd("system_monitor:collection:swap:" + IP,
+ conn.zadd("system_monitor:collection:swap:" + UUID,
{json.dumps({"time": TIME, "value": format_MB(psutil.swap_memory().used)}): TIME})
return info
@@ -116,7 +125,6 @@ def get_disk_partitions():
def get_disk_info():
- global IP, TIME
# disk: total, usage, free, %'''
disks = {}
@@ -129,7 +137,7 @@ def get_disk_info():
'percent': disk.percent
}
- conn.zadd("system_monitor:collection:disk:" + IP,
+ conn.zadd("system_monitor:collection:disk:" + UUID,
{json.dumps({"time": TIME, "value": disks}): TIME})
return disks
@@ -156,7 +164,6 @@ def get_uptime():
def get_cpu_load():
- global IP, TIME
# uptime second
p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'", shell=True, stdout=subprocess.PIPE)
tmp = b2s(p.stdout.readline().strip()).replace(' ', '').split(",")
@@ -165,7 +172,7 @@ def get_cpu_load():
'5min': tmp[1],
'15min': tmp[2]
}
- conn.zadd("system_monitor:collection:cpu:" + IP,
+ conn.zadd("system_monitor:collection:cpu:" + UUID,
{json.dumps({"time": TIME, "value": str(psutil.cpu_percent())}): TIME})
return load
@@ -189,7 +196,7 @@ def b2s(b):
def report():
- conn.set("system_monitor:stat:" + IP, get_aggragate_json())
+ conn.set("system_monitor:stat:" + UUID, get_aggragate_json())
def delete_offline():
@@ -198,17 +205,16 @@ def delete_offline():
for k, v in conn.hgetall("system_monitor:hashes").items():
hashes = bytes.decode(k)
ip = bytes.decode(v)
- if TIME - float(conn.hmget("system_monitor:info:" + ip, 'Update Time')[0]) > TIMEOUT:
- p.srem("system_monitor:nodes", ip)
+ if conn.exsits("system_monitor:info:" + hashes) and TIME - float(conn.hmget("system_monitor:info:" + hashes, 'Update Time')[0]) > TIMEOUT:
p.hdel("system_monitor:hashes", hashes)
- p.delete("system_monitor:info:" + ip)
- p.zremrangebyscore("system_monitor:collection:cpu:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:disk:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:memory:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:swap:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:RX:" + ip, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:TX:" + ip, 0, TIME)
- p.delete("system_monitor:stat:" + ip)
+ p.delete("system_monitor:info:" + hashes)
+ p.zremrangebyscore("system_monitor:collection:cpu:" + hashes, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:disk:" + hashes, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:memory:" + hashes, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:swap:" + hashes, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:RX:" + hashes, 0, TIME)
+ p.zremrangebyscore("system_monitor:collection:network:TX:" + hashes, 0, TIME)
+ p.delete("system_monitor:stat:" + hashes)
p.execute()
except:
pass
@@ -243,16 +249,15 @@ def report_once():
"Update Time": TIME
}
with conn.pipeline(transaction=False) as pipeline:
- pipeline.sadd("system_monitor:nodes", IP)
- pipeline.hmset("system_monitor:hashes", {hashlib.sha256((IP + SALT).encode("utf-8")).hexdigest(): IP})
- pipeline.hmset("system_monitor:info:" + IP, info)
- pipeline.zremrangebyscore("system_monitor:collection:cpu:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:thermal:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:disk:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:memory:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:swap:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + IP, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + IP, 0, TIME - TIME_PERIOD)
+ pipeline.hmset("system_monitor:hashes", {UUID: IP})
+ pipeline.hmset("system_monitor:info:" + UUID, info)
+ pipeline.zremrangebyscore("system_monitor:collection:cpu:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:thermal:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:disk:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:memory:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:swap:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + UUID, 0, TIME - TIME_PERIOD)
+ pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + UUID, 0, TIME - TIME_PERIOD)
pipeline.execute()
report()
From 991e2550e224d8d57fb2689901c4b510bfb127e9 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 17 Dec 2021 11:12:42 +0800
Subject: [PATCH 07/62] Update report.py
fix import and remove unused variable
---
linux/report.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/linux/report.py b/linux/report.py
index 4dc1ef4..c3002f6 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -8,11 +8,11 @@
import requests
import psutil
import uuid
+import os
HOST = "redis-host"
PORT = "redis-port"
PASSWORD = "redis-password"
-SALT = "redis-salt"
UUID = str(uuid.uuid4()).replace("-", "")
IP = "0.0.0.0"
TIME = math.floor(time.time())
From 15a26a7508aff058aeda3dbe8411e0e64346afde Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 17 Dec 2021 11:15:20 +0800
Subject: [PATCH 08/62] Update report.py
Optimize removal of old data
---
linux/report.py | 34 ++++++++++++++--------------------
1 file changed, 14 insertions(+), 20 deletions(-)
diff --git a/linux/report.py b/linux/report.py
index c3002f6..9c313da 100644
--- a/linux/report.py
+++ b/linux/report.py
@@ -59,8 +59,7 @@ def get_network():
def get_process_num():
# n-2 '''
p = subprocess.Popen("ps aux | wc -l", shell=True, stdout=subprocess.PIPE)
- return
-(p.stdout.readline().strip())
+ return b2s(p.stdout.readline().strip())
def get_cpu_info():
@@ -205,17 +204,19 @@ def delete_offline():
for k, v in conn.hgetall("system_monitor:hashes").items():
hashes = bytes.decode(k)
ip = bytes.decode(v)
- if conn.exsits("system_monitor:info:" + hashes) and TIME - float(conn.hmget("system_monitor:info:" + hashes, 'Update Time')[0]) > TIMEOUT:
- p.hdel("system_monitor:hashes", hashes)
- p.delete("system_monitor:info:" + hashes)
- p.zremrangebyscore("system_monitor:collection:cpu:" + hashes, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:disk:" + hashes, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:memory:" + hashes, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:swap:" + hashes, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:RX:" + hashes, 0, TIME)
- p.zremrangebyscore("system_monitor:collection:network:TX:" + hashes, 0, TIME)
- p.delete("system_monitor:stat:" + hashes)
- p.execute()
+ if conn.exsits("system_monitor:info:" + hashes):
+ p.zremrangebyscore("system_monitor:collection:cpu:" + hashes, 0, TIME - TIME_PERIOD)
+ p.zremrangebyscore("system_monitor:collection:disk:" + hashes, 0, TIME - TIME_PERIOD)
+ p.zremrangebyscore("system_monitor:collection:memory:" + hashes, 0, TIME - TIME_PERIOD)
+ p.zremrangebyscore("system_monitor:collection:swap:" + hashes, 0, TIME - TIME_PERIOD)
+ p.zremrangebyscore("system_monitor:collection:network:RX:" + hashes, 0, TIME - TIME_PERIOD)
+ p.zremrangebyscore("system_monitor:collection:network:TX:" + hashes, 0, TIME - TIME_PERIOD)
+
+ if TIME - float(conn.hmget("system_monitor:info:" + hashes, 'Update Time')[0]) > TIMEOUT:
+ p.hdel("system_monitor:hashes", hashes)
+ p.delete("system_monitor:info:" + hashes)
+ p.delete("system_monitor:stat:" + hashes)
+ p.execute()
except:
pass
@@ -251,13 +252,6 @@ def report_once():
with conn.pipeline(transaction=False) as pipeline:
pipeline.hmset("system_monitor:hashes", {UUID: IP})
pipeline.hmset("system_monitor:info:" + UUID, info)
- pipeline.zremrangebyscore("system_monitor:collection:cpu:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:thermal:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:disk:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:memory:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:swap:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:network:RX:" + UUID, 0, TIME - TIME_PERIOD)
- pipeline.zremrangebyscore("system_monitor:collection:network:TX:" + UUID, 0, TIME - TIME_PERIOD)
pipeline.execute()
report()
From 78c2ca5e61df336045205dd7981724742769abd0 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 8 Dec 2023 19:06:15 +0800
Subject: [PATCH 09/62] Update
1. Update GitHub Actions autobuild.
2. Support multiple platforms.
3. Compatible with the Server-Monitor.
---
.github/workflows/build.yml | 316 +++++++++++++++++++++++++++
build-script/build.alpine.release.sh | 17 ++
linux/Test_report.py | 21 --
linux/report.py | 262 ----------------------
report.py | 264 ++++++++++++++++++++++
requirements.txt | 5 +
6 files changed, 602 insertions(+), 283 deletions(-)
create mode 100644 .github/workflows/build.yml
create mode 100644 build-script/build.alpine.release.sh
delete mode 100644 linux/Test_report.py
delete mode 100644 linux/report.py
create mode 100644 report.py
create mode 100644 requirements.txt
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..80d3719
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,316 @@
+name: GitHub CI
+on:
+ push:
+ branches: [master]
+ workflow_dispatch:
+ pull_request:
+
+concurrency:
+ group: ${{ github.ref }}-${{ github.workflow }}
+ cancel-in-progress: true
+
+jobs:
+ linux32_build:
+ name: Linux x86 Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ - name: Build
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:x86-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+
+ - name: Package Release
+ run: tar czf server_monitor_linux32.tar.gz monitor
+
+ - name: Upload
+ uses: actions/upload-artifact@v3
+ with:
+ path: |
+ server_monitor_*.tar.gz
+
+ - name: Draft Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ with:
+ files: server_monitor_linux32.tar.gz
+ draft: true
+ tag_name: Alpha
+
+ linux64_build:
+ name: Linux x86_64 Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ - name: Build
+ run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:amd64-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+
+ - name: Package Release
+ run: tar czf server_monitor_linux64.tar.gz monitor
+
+ - name: Upload
+ uses: actions/upload-artifact@v3
+ with:
+ path: |
+ server_monitor_*.tar.gz
+
+ - name: Draft Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ with:
+ files: server_monitor_linux64.tar.gz
+ draft: true
+ tag_name: Alpha
+
+ armv7_build:
+ name: Linux armv7 Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+# - name: Set up QEMU
+# uses: docker/setup-qemu-action@v2
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ - name: Build
+ run: |
+ docker run --rm --privileged multiarch/qemu-user-static:register --reset
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:armv7-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ - name: Package Release
+ run: tar czf server_monitor_armv7.tar.gz monitor
+
+ - name: Upload
+ uses: actions/upload-artifact@v3
+ with:
+ path: |
+ server_monitor_*.tar.gz
+
+ - name: Draft Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ with:
+ files: server_monitor_armv7.tar.gz
+ draft: true
+ tag_name: Alpha
+
+ aarch64_build:
+ name: Linux aarch64 Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+# - name: Set up QEMU
+# uses: docker/setup-qemu-action@v2
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ - name: Build
+ run: |
+ docker run --rm --privileged multiarch/qemu-user-static:register --reset
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:aarch64-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ - name: Package Release
+ run: tar czf server_monitor_aarch64.tar.gz monitor
+
+ - name: Upload
+ uses: actions/upload-artifact@v3
+ with:
+ path: |
+ server_monitor_*.tar.gz
+
+ - name: Draft Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ with:
+ files: server_monitor_aarch64.tar.gz
+ draft: true
+ tag_name: Alpha
+
+ armhf_build:
+ name: Linux armhf Build
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+# - name: Set up QEMU
+# uses: docker/setup-qemu-action@v2
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ - name: Build
+ run: |
+ docker run --rm --privileged multiarch/qemu-user-static:register --reset
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:armhf-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ - name: Package Release
+ run: tar czf server_monitor_armhf.tar.gz monitor
+
+ - name: Upload
+ uses: actions/upload-artifact@v3
+ with:
+ path: |
+ server_monitor_*.tar.gz
+
+ - name: Draft Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ with:
+ files: server_monitor_armhf.tar.gz
+ draft: true
+ tag_name: Alpha
+
+ # macos_build:
+ # name: macOS Build
+ # runs-on: macos-latest
+ # steps:
+ # - uses: actions/checkout@v3
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i -e 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ # - name: Build
+ # run: bash build-script/build.macos.release.sh
+
+ # - name: Package Release
+ # run: tar czf server_monitor_darwin64.tar.gz monitor
+
+ # - name: Upload
+ # uses: actions/upload-artifact@v3
+ # with:
+ # path: |
+ # server_monitor_*.tar.gz
+
+ # - name: Draft Release
+ # uses: softprops/action-gh-release@v1
+ # if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ # with:
+ # files: server_monitor_darwin64.tar.gz
+ # draft: true
+ # tag_name: Alpha
+
+ # windows64_build:
+ # name: Windows x86_64 Build
+ # runs-on: windows-latest
+ # defaults:
+ # run:
+ # shell: msys2 {0}
+ # steps:
+ # - uses: actions/checkout@v3
+ # - uses: actions/setup-node@v3
+ # with:
+ # node-version: "16"
+ # - uses: msys2/setup-msys2@v2
+ # with:
+ # update: true
+ # install: base-devel git mingw-w64-x86_64-gcc mingw-w64-x86_64-cmake mingw-w64-x86_64-libevent mingw-w64-x86_64-pcre2 patch
+ # msystem: MINGW64
+ # path-type: inherit
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ # - name: Build
+ # run: bash build-script/build.windows.release.sh
+
+ # - name: Package Release
+ # run: 7z a server_monitor_win64.7z monitor/
+
+ # - name: Upload
+ # uses: actions/upload-artifact@v3
+ # with:
+ # path: |
+ # server_monitor_*.7z
+
+ # - name: Draft Release
+ # uses: softprops/action-gh-release@v1
+ # if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ # with:
+ # files: server_monitor_win64.7z
+ # draft: true
+ # tag_name: Alpha
+
+ # windows32_build:
+ # name: Windows x86 Build
+ # runs-on: windows-latest
+ # defaults:
+ # run:
+ # shell: msys2 {0}
+ # steps:
+ # - uses: actions/checkout@v3
+ # - uses: actions/setup-node@v3
+ # with:
+ # node-version: "16"
+ # - uses: msys2/setup-msys2@v2
+ # with:
+ # update: true
+ # install: base-devel git mingw-w64-i686-gcc mingw-w64-i686-cmake mingw-w64-i686-libevent mingw-w64-i686-pcre2 patch
+ # msystem: MINGW32
+ # path-type: inherit
+ # - name: Add commit id into version
+ # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
+ # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
+ # - name: Build
+ # run: bash build-script/build.windows.release.sh
+
+ # - name: Package Release
+ # run: 7z a server_monitor_win32.7z monitor/
+
+ # - name: Upload
+ # uses: actions/upload-artifact@v3
+ # with:
+ # path: |
+ # server_monitor_*.7z
+
+ # - name: Draft Release
+ # uses: softprops/action-gh-release@v1
+ # if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ # with:
+ # files: server_monitor_win32.7z
+ # draft: true
+ # tag_name: Alpha
+
+ Upload-Release:
+ permissions: write-all
+ if: ${{ github.ref_type=='branch' }}
+ needs:
+ [
+ linux64_build,
+ linux32_build,
+ armv7_build,
+ armhf_build,
+ aarch64_build,
+# macos_build,
+# windows64_build,
+# windows32_build,
+ ]
+ runs-on: ubuntu-latest
+ steps:
+ - name: Delete current release assets
+ uses: andreaswilli/delete-release-assets-action@v2.0.0
+ with:
+ github_token: ${{ secrets.GITHUB_TOKEN }}
+ tag: Alpha
+ deleteOnlyFromDrafts: false
+
+ - uses: actions/download-artifact@v3
+ with:
+ name: artifact
+ path: bin/
+
+ - name: Display structure of downloaded files
+ run: ls -R
+ working-directory: bin
+
+ - name: Tag Repo
+ uses: richardsimko/update-tag@v1.0.6
+ with:
+ tag_name: Alpha
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Upload Release
+ uses: softprops/action-gh-release@v1
+ if: ${{ success() }}
+ with:
+ tag: Alpha
+ tag_name: Alpha
+ files: bin/*
+ generate_release_notes: true
diff --git a/build-script/build.alpine.release.sh b/build-script/build.alpine.release.sh
new file mode 100644
index 0000000..bc382fe
--- /dev/null
+++ b/build-script/build.alpine.release.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+set -xe
+
+apk upgrade -U
+
+apk del py3-packaging
+apk add gcc python3-dev py3-psutil py3-wheel g++ build-base linux-headers zlib-dev cmake make autoconf automake libtool
+pip3 install -r requirements.txt
+pip3 install pyinstaller==5.13.2
+
+pyinstaller --onefile report.py
+
+cd build
+chmod +rx report
+chmod +r ./*
+cd ..
+mv build monitor
diff --git a/linux/Test_report.py b/linux/Test_report.py
deleted file mode 100644
index ad5ec5b..0000000
--- a/linux/Test_report.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from report import *
-
-print("running processes")
-print(get_process_num())
-print("\ncpu info")
-print(get_cpu_info())
-print(get_cpu_core())
-print(get_cpu_load())
-print(get_cpu_temp())
-print("\nip")
-print(get_ipv4())
-print(get_ipv6())
-print("\nconnection")
-print(get_connections())
-print("\nuptime")
-print(get_uptime())
-print("\nsystem version")
-print(get_sys_version())
-print("\nMem & Disk")
-print(get_mem_info())
-print(get_disk_info())
diff --git a/linux/report.py b/linux/report.py
deleted file mode 100644
index 9c313da..0000000
--- a/linux/report.py
+++ /dev/null
@@ -1,262 +0,0 @@
-import hashlib
-import json
-import math
-import re
-import redis
-import subprocess
-import time
-import requests
-import psutil
-import uuid
-import os
-
-HOST = "redis-host"
-PORT = "redis-port"
-PASSWORD = "redis-password"
-UUID = str(uuid.uuid4()).replace("-", "")
-IP = "0.0.0.0"
-TIME = math.floor(time.time())
-TIMEOUT = 259200
-TIME_PERIOD = 86400
-DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive', '/var/lib/']
-
-conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
-
-if os.path.isfile('.uuid'):
- with open('.uuid', 'r') as fp:
- UUID = fp.read().strip()
-else:
- with open('.uuid', 'w') as fp:
- fp.write(UUID)
-
-def get_network():
- network = {'RX': {
- 'bytes': psutil.net_io_counters().bytes_recv,
- 'packets': psutil.net_io_counters().packets_recv,
- }, 'TX': {
- 'bytes': psutil.net_io_counters().bytes_sent,
- 'packets': psutil.net_io_counters().packets_sent,
- }}
-
- if conn.exists("system_monitor:collection:network:tmp:" + UUID):
- net0 = json.loads(conn.get("system_monitor:collection:network:tmp:" + UUID))
- if network['RX']['packets'] > net0['RX']['packets'] and network['RX']['bytes'] > net0['RX']['bytes'] and \
- network['TX']['packets'] > net0['TX']['packets'] and network['TX']['bytes'] > net0['TX']['bytes']:
- conn.zadd("system_monitor:collection:network:RX:" + UUID,
- {json.dumps(
- {"time": TIME, "value": '{},{}'.format(network['RX']['packets'] - net0['RX']['packets'],
- network['RX']['bytes'] - net0['RX']['bytes'])}): TIME})
- conn.zadd("system_monitor:collection:network:TX:" + UUID,
- {json.dumps(
- {"time": TIME, "value": '{},{}'.format(network['TX']['packets'] - net0['TX']['packets'],
- network['TX']['bytes'] - net0['TX']['bytes'])}): TIME})
-
- conn.set("system_monitor:collection:network:tmp:" + UUID,
- json.dumps(network), TIME_PERIOD)
- return network
-
-
-def get_process_num():
- # n-2 '''
- p = subprocess.Popen("ps aux | wc -l", shell=True, stdout=subprocess.PIPE)
- return b2s(p.stdout.readline().strip())
-
-
-def get_cpu_info():
- # core modelname mhz'''
- p = subprocess.Popen("cat /proc/cpuinfo | egrep 'Processor|name' | cut -f2 -d: |uniq", shell=True, stdout=subprocess.PIPE)
- return b2s(p.stdout.readline().strip())
-
-
-def get_cpu_core():
- # core modelname mhz'''
- return str(psutil.cpu_count())
-
-
-def get_cpu_temp():
- # core modelname mhz'''
- p = subprocess.Popen("cat /sys/devices/virtual/thermal/thermal_zone0/temp", shell=True, stdout=subprocess.PIPE)
- try:
- tmp = b2s(p.stdout.readline().strip())
- conn.zadd("system_monitor:collection:thermal:" + UUID,
- {json.dumps({"time": TIME, "value": str(int(tmp)*1.0/1000)}): TIME})
- return tmp
- except:
- return 0
-
-
-def get_mem_info():
- info = {'Mem': {
- 'total': format_MB(psutil.virtual_memory().total),
- 'used': format_MB(psutil.virtual_memory().used),
- 'free': format_MB(psutil.virtual_memory().free),
- }, 'Swap': {
- 'total': format_MB(psutil.swap_memory().total),
- 'used': format_MB(psutil.swap_memory().used),
- 'free': format_MB(psutil.swap_memory().free)
- }}
-
- conn.zadd("system_monitor:collection:memory:" + UUID,
- {json.dumps({"time": TIME, "value": format_MB(psutil.virtual_memory().used)}): TIME})
-
- conn.zadd("system_monitor:collection:swap:" + UUID,
- {json.dumps({"time": TIME, "value": format_MB(psutil.swap_memory().used)}): TIME})
- return info
-
-
-def get_sys_version():
- # System and version'''
- p = subprocess.Popen(". /usr/lib/os-release;echo $PRETTY_NAME", shell=True, stdout=subprocess.PIPE)
-
- return b2s(p.stdout.readline().strip())
-
-
-def get_disk_partitions():
- parts = psutil.disk_partitions(True)
- result = []
- for part in parts:
- result.append(part)
- for i in DISK_EXCLUDE:
- if part.mountpoint.find(i) != -1:
- result.remove(part)
- break
- return result
-
-
-def get_disk_info():
- # disk: total, usage, free, %'''
- disks = {}
-
- for partition in get_disk_partitions():
- disk = psutil.disk_usage(partition.mountpoint)
- disks[partition.mountpoint] = {
- 'total': format_MB(disk.total),
- 'used': format_MB(disk.used),
- 'free': format_MB(disk.free),
- 'percent': disk.percent
- }
-
- conn.zadd("system_monitor:collection:disk:" + UUID,
- {json.dumps({"time": TIME, "value": disks}): TIME})
- return disks
-
-
-def get_ipv4():
- # interface ipv4'''
- return fetch_url("http://v4.ipv6-test.com/api/myip.php")
-
-
-def get_ipv6():
- # interface ipv6
- return fetch_url("http://v6.ipv6-test.com/api/myip.php")
-
-
-def get_connections():
- # establish
- p = subprocess.Popen("netstat -na|grep ESTABLISHED|wc -l", shell=True, stdout=subprocess.PIPE)
- return b2s(p.stdout.readline().strip())
-
-
-def get_uptime():
- # uptime second
- return time.time() - psutil.boot_time()
-
-
-def get_cpu_load():
- # uptime second
- p = subprocess.Popen("uptime|awk -F'load average:' '{print $2}'", shell=True, stdout=subprocess.PIPE)
- tmp = b2s(p.stdout.readline().strip()).replace(' ', '').split(",")
- load = {
- '1min': tmp[0],
- '5min': tmp[1],
- '15min': tmp[2]
- }
- conn.zadd("system_monitor:collection:cpu:" + UUID,
- {json.dumps({"time": TIME, "value": str(psutil.cpu_percent())}): TIME})
- return load
-
-
-def get_aggragate_json():
- info = {
- 'Connection': get_connections(),
- 'Disk': get_disk_info(),
- 'Uptime': get_uptime(),
- 'Memory': get_mem_info(),
- 'Load': get_cpu_load(),
- 'Process': get_process_num(),
- 'Network': get_network(),
- 'Thermal': get_cpu_temp()
- }
- return json.dumps(info)
-
-
-def b2s(b):
- return str(b, encoding="utf-8")
-
-
-def report():
- conn.set("system_monitor:stat:" + UUID, get_aggragate_json())
-
-
-def delete_offline():
- with conn.pipeline(transaction=False) as p:
- try:
- for k, v in conn.hgetall("system_monitor:hashes").items():
- hashes = bytes.decode(k)
- ip = bytes.decode(v)
- if conn.exsits("system_monitor:info:" + hashes):
- p.zremrangebyscore("system_monitor:collection:cpu:" + hashes, 0, TIME - TIME_PERIOD)
- p.zremrangebyscore("system_monitor:collection:disk:" + hashes, 0, TIME - TIME_PERIOD)
- p.zremrangebyscore("system_monitor:collection:memory:" + hashes, 0, TIME - TIME_PERIOD)
- p.zremrangebyscore("system_monitor:collection:swap:" + hashes, 0, TIME - TIME_PERIOD)
- p.zremrangebyscore("system_monitor:collection:network:RX:" + hashes, 0, TIME - TIME_PERIOD)
- p.zremrangebyscore("system_monitor:collection:network:TX:" + hashes, 0, TIME - TIME_PERIOD)
-
- if TIME - float(conn.hmget("system_monitor:info:" + hashes, 'Update Time')[0]) > TIMEOUT:
- p.hdel("system_monitor:hashes", hashes)
- p.delete("system_monitor:info:" + hashes)
- p.delete("system_monitor:stat:" + hashes)
- p.execute()
- except:
- pass
-
-
-def format_MB(v):
- return '%.2f' % (v * 1.0 / 1048576)
-
-
-def fetch_url(url):
- i = 5
- while i > 0:
- try:
- resp = requests.get(url=url, timeout=5)
- if resp.status_code == 200:
- return resp.text
- except:
- i = i - 1
- return "None"
-
-
-def report_once():
- """ip"""
- global IP, TIME
- IP = get_ipv4()
-
- info = {
- "CPU": get_cpu_core() + "x " + get_cpu_info(),
- "System Version": get_sys_version(),
- "IPV4": re.sub("[0-9]*\\.[0-9]*\\.[0-9]*", "*.*.*", get_ipv4()),
- "IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
- "Update Time": TIME
- }
- with conn.pipeline(transaction=False) as pipeline:
- pipeline.hmset("system_monitor:hashes", {UUID: IP})
- pipeline.hmset("system_monitor:info:" + UUID, info)
- pipeline.execute()
-
- report()
- delete_offline()
- conn.close()
-
-
-report_once()
diff --git a/report.py b/report.py
new file mode 100644
index 0000000..0353a6d
--- /dev/null
+++ b/report.py
@@ -0,0 +1,264 @@
+import json
+import math
+import re
+import redis
+import logging
+import time
+import requests
+import psutil
+import uuid
+import os
+import cpuinfo
+import distro
+import platform
+
+HOST = ""
+PORT = ""
+PASSWORD = ""
+IPV4_API = "http://v4.ipv6-test.com/api/myip.php"
+IPV6_API = "http://v6.ipv6-test.com/api/myip.php"
+IP_API = "http://ip-api.com/json?fields=country,countryCode"
+REPORT_TIME = 60
+logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
+
+
+UUID = str(uuid.uuid4()).replace("-", "")
+IPV4 = None
+IPV6 = None
+COUNTRY = None
+conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
+TIME = math.floor(time.time())
+TIMEOUT = 259200
+RETENTION_TIME = 86400
+CPU_INFO = cpuinfo.get_cpu_info()
+DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive', '/var/lib']
+DISK_FS_EXCLUDE = ['tmpfs', 'overlay']
+NET_FORMER = psutil.net_io_counters()
+
+
+def get_network():
+ global NET_FORMER
+ net_temp = psutil.net_io_counters()
+
+ network = {'RX': {
+ 'bytes': net_temp.bytes_recv - NET_FORMER.bytes_recv,
+ 'packets': net_temp.packets_recv - NET_FORMER.packets_recv,
+ }, 'TX': {
+ 'bytes': net_temp.bytes_sent - NET_FORMER.bytes_sent,
+ 'packets': net_temp.packets_sent - NET_FORMER.packets_sent,
+ }}
+
+ NET_FORMER = net_temp
+ return network
+
+
+def get_process_num():
+ return len(psutil.pids())
+
+
+def get_cpu_name():
+ return CPU_INFO['brand_raw']
+
+
+def get_cpu_core():
+ # core modelname mhz'''
+ return str(psutil.cpu_count())
+
+
+def get_temp():
+ # thermal temp
+ result = {}
+ try:
+ for sensor_type, sensors in psutil.sensors_temperatures().items():
+ for sensor in sensors:
+ result[sensor_type+":"+sensor.label] = sensor.current
+ except: pass
+ return result
+
+
+def get_mem_info():
+ info = {'Mem': {
+ 'total': '%.2f' % (psutil.virtual_memory().total*1.0/1048576),
+ 'used': '%.2f' % (psutil.virtual_memory().used*1.0/1048576),
+ 'free': '%.2f' % (psutil.virtual_memory().free*1.0/1048576),
+ 'percent': psutil.virtual_memory().percent,
+ }, 'Swap': {
+ 'total': '%.2f' % (psutil.swap_memory().total*1.0/1048576),
+ 'used': '%.2f' % (psutil.swap_memory().used*1.0/1048576),
+ 'free': '%.2f' % (psutil.swap_memory().free*1.0/1048576),
+ 'percent': psutil.swap_memory().percent,
+ }}
+ return info
+
+
+def get_sys_version():
+ # System and version'''
+ if distro.name() != "":
+ return " ".join([distro.name(), distro.version(), distro.codename()])
+ else:
+ return " ".join([platform.system(), platform.release()])
+
+
+def get_disk_partitions():
+ parts = psutil.disk_partitions(True)
+ result = []
+ for part in parts:
+ result.append(part)
+ for i in DISK_EXCLUDE:
+ if part.mountpoint.find(i) != -1 or part.fstype in DISK_FS_EXCLUDE:
+ result.remove(part)
+ break
+
+ return result
+
+
+def get_disk_info():
+ disks = {}
+
+ for partition in get_disk_partitions():
+ disk = psutil.disk_usage(partition.mountpoint)
+ disks[partition.mountpoint] = {
+ 'total': '%.2f' % (disk.total*1.0/1048576),
+ 'used': '%.2f' % (disk.used*1.0/1048576),
+ 'free': '%.2f' % (disk.free*1.0/1048576),
+ 'percent': disk.percent
+ }
+
+ return disks
+
+
+def get_ipv4():
+ # interface ipv4'''
+ global IPV4
+ if IPV4 is None:
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url=IPV4_API, timeout=5)
+ if resp.status_code == 200:
+ IPV4 = resp.text
+ return IPV4
+ except:
+ i = i - 1
+
+ return "None"
+ else:
+ return IPV4
+
+
+def get_ipv6():
+ # interface ipv6
+ global IPV6
+ if IPV6 is None:
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url=IPV6_API, timeout=5)
+ if resp.status_code == 200:
+ return resp.text
+ except:
+ i = i - 1
+
+ return "None"
+ else:
+ return IPV6
+
+
+def get_country():
+ # interface ipv6
+ global COUNTRY
+ if COUNTRY is None:
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url=IP_API, timeout=5)
+ if resp.status_code == 200:
+ j = resp.json()
+ return (j["country"], j["countryCode"])
+ except:
+ i = i - 1
+
+ return ("None", "None")
+ else:
+ return COUNTRY
+
+
+def get_connections():
+ return len(psutil.net_connections())
+
+
+def get_uptime():
+ t = time.time() - psutil.boot_time()
+ return "%02d:%02d:%02d" % (int(t / 3600), int(t / 60 % 60), int(t % 60))
+
+
+def get_load():
+ return dict(psutil.cpu_times_percent()._asdict())
+
+
+def get_aggregate_stat_json():
+ return json.dumps(get_aggregate_stat())
+
+
+def get_aggregate_stat():
+ info = {
+ 'Disk': get_disk_info(),
+ 'Memory': get_mem_info(),
+ 'Load': get_load(),
+ 'Network': get_network(),
+ 'Thermal': get_temp()
+ }
+ logging.debug(info)
+ return info
+
+
+def report_once():
+ """ip"""
+ global IP, TIME
+ logging.info("Reporting...")
+ IP = get_ipv4()
+ TIME = time.time()
+ COUNTRY = get_country()
+ info = {
+ "CPU": "{}x {} @{}".format(CPU_INFO['count'], CPU_INFO.get('brand_raw', CPU_INFO.get('arch', 'Unknown')), CPU_INFO['hz_advertised_friendly']),
+ "System Version": get_sys_version(),
+ "IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
+ "IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
+ 'Uptime': get_uptime(),
+ 'Connection': get_connections(),
+ 'Process': get_process_num(),
+ "Update Time": TIME,
+ "Country": COUNTRY[0],
+ "Country Code": COUNTRY[1],
+ "Throughput": "D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
+ }
+ logging.debug(info)
+
+ with conn.pipeline(transaction=False) as pipeline:
+ pipeline.hmset(name="system_monitor:hashes", mapping={UUID: IP})
+ pipeline.hmset(name="system_monitor:info:" + UUID, mapping=info)
+ pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
+ pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
+ pipeline.expire("system_monitor:nodes", TIMEOUT)
+ pipeline.expire("system_monitor:hashes", TIMEOUT)
+ pipeline.expire("system_monitor:info:" + UUID, TIMEOUT)
+ pipeline.expire("system_monitor:collection:" + UUID, TIMEOUT)
+ pipeline.execute()
+
+ logging.info("Finish Reporting!")
+
+
+if os.path.isfile('.uuid'):
+ with open('.uuid', 'r') as fp:
+ UUID = fp.read().strip()
+else:
+ with open('.uuid', 'w') as fp:
+ fp.write(UUID)
+
+
+while True:
+ try:
+ report_once()
+ except Exception as e:
+ logging.error(e)
+ time.sleep(REPORT_TIME)
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..6a30761
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,5 @@
+py-cpuinfo
+distro
+psutil
+requests
+redis
From 63852c7304832690396c17aef891b78ecd4c1725 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 8 Dec 2023 19:29:37 +0800
Subject: [PATCH 10/62] Update
---
.env | 14 ++++++++++
.github/workflows/build.yml | 21 --------------
build-script/build.alpine.release.sh | 8 ++----
report.py | 41 ++++++++++++++--------------
4 files changed, 38 insertions(+), 46 deletions(-)
create mode 100644 .env
diff --git a/.env b/.env
new file mode 100644
index 0000000..9395b57
--- /dev/null
+++ b/.env
@@ -0,0 +1,14 @@
+HOST=127.0.0.1
+PORT=6379
+PASSWORD=""
+
+IPV4_API="http://v4.ipv6-test.com/api/myip.php"
+IPV6_API="http://v6.ipv6-test.com/api/myip.php"
+IP_API="http://ip-api.com/json?fields=country,countryCode"
+
+REPORT_TIME=60
+TIMEOUT=259200
+RETENTION_TIME=86400
+
+DISK_EXCLUDE=/run,/sys,/boot,/dev,/proc,/gdrive,/var/lib
+DISK_FS_EXCLUDE=tmpfs,overlay
\ No newline at end of file
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 80d3719..7c89cff 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -15,9 +15,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- # - name: Add commit id into version
- # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
- # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:x86-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
@@ -43,9 +40,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- # - name: Add commit id into version
- # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
- # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:amd64-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
@@ -71,11 +65,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-# - name: Set up QEMU
-# uses: docker/setup-qemu-action@v2
- # - name: Add commit id into version
- # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
- # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
@@ -102,11 +91,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-# - name: Set up QEMU
-# uses: docker/setup-qemu-action@v2
- # - name: Add commit id into version
- # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
- # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
@@ -133,11 +117,6 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-# - name: Set up QEMU
-# uses: docker/setup-qemu-action@v2
- # - name: Add commit id into version
- # if: ${{ !startsWith(github.ref, 'refs/tags/') }}
- # run: SHA=$(git rev-parse --short HEAD) && sed -i 's/\(v[0-9]\.[0-9]\.[0-9]\)/\1-'"$SHA"'/' src/version.h
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
diff --git a/build-script/build.alpine.release.sh b/build-script/build.alpine.release.sh
index bc382fe..9f88fc2 100644
--- a/build-script/build.alpine.release.sh
+++ b/build-script/build.alpine.release.sh
@@ -1,17 +1,15 @@
#!/bin/bash
set -xe
-apk upgrade -U
-
-apk del py3-packaging
apk add gcc python3-dev py3-psutil py3-wheel g++ build-base linux-headers zlib-dev cmake make autoconf automake libtool
pip3 install -r requirements.txt
+
pip3 install pyinstaller==5.13.2
pyinstaller --onefile report.py
-cd build
+cd dist
chmod +rx report
chmod +r ./*
cd ..
-mv build monitor
+mv dist monitor
diff --git a/report.py b/report.py
index 0353a6d..19c40bf 100644
--- a/report.py
+++ b/report.py
@@ -11,30 +11,31 @@
import cpuinfo
import distro
import platform
+from dotenv import load_dotenv
+
+load_dotenv()
+
+HOST = os.getenv("HOST", "127.0.0.1")
+PORT = os.getenv("PORT", "6379")
+PASSWORD = os.getenv("PASSWORD", "")
+IPV4_API = os.getenv('IPV4_API', "http://v4.ipv6-test.com/api/myip.php")
+IPV6_API = os.getenv('IPV6_API', "http://v6.ipv6-test.com/api/myip.php")
+IP_API = os.getenv('IP_API', "http://ip-api.com/json?fields=country,countryCode")
+REPORT_INTEVAL = int(os.getenv('REPORT_INTEVAL', '60'))
+DATA_TIMEOUT = int(os.getenv('DATA_TIMEOUT', '259200'))
+RETENTION_TIME = int(os.getenv('RETENTION_TIME', '86400'))
+DISK_EXCLUDE = os.getenv('DISK_EXCLUDE','/run,/sys,/boot,/dev,/proc,/var/lib').split(",")
+DISK_FS_EXCLUDE = os.getenv('DISK_FS_EXCLUDE', 'tmpfs,overlay').split(",")
-HOST = ""
-PORT = ""
-PASSWORD = ""
-IPV4_API = "http://v4.ipv6-test.com/api/myip.php"
-IPV6_API = "http://v6.ipv6-test.com/api/myip.php"
-IP_API = "http://ip-api.com/json?fields=country,countryCode"
-REPORT_TIME = 60
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
-
-
UUID = str(uuid.uuid4()).replace("-", "")
IPV4 = None
IPV6 = None
COUNTRY = None
conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
TIME = math.floor(time.time())
-TIMEOUT = 259200
-RETENTION_TIME = 86400
-CPU_INFO = cpuinfo.get_cpu_info()
-DISK_EXCLUDE = ['/run', '/sys', '/boot', '/dev', '/proc', '/gdrive', '/var/lib']
-DISK_FS_EXCLUDE = ['tmpfs', 'overlay']
NET_FORMER = psutil.net_io_counters()
-
+CPU_INFO = cpuinfo.get_cpu_info()
def get_network():
global NET_FORMER
@@ -239,10 +240,10 @@ def report_once():
pipeline.hmset(name="system_monitor:info:" + UUID, mapping=info)
pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
- pipeline.expire("system_monitor:nodes", TIMEOUT)
- pipeline.expire("system_monitor:hashes", TIMEOUT)
- pipeline.expire("system_monitor:info:" + UUID, TIMEOUT)
- pipeline.expire("system_monitor:collection:" + UUID, TIMEOUT)
+ pipeline.expire("system_monitor:nodes", DATA_TIMEOUT)
+ pipeline.expire("system_monitor:hashes", DATA_TIMEOUT)
+ pipeline.expire("system_monitor:info:" + UUID, DATA_TIMEOUT)
+ pipeline.expire("system_monitor:collection:" + UUID, DATA_TIMEOUT)
pipeline.execute()
logging.info("Finish Reporting!")
@@ -261,4 +262,4 @@ def report_once():
report_once()
except Exception as e:
logging.error(e)
- time.sleep(REPORT_TIME)
+ time.sleep(REPORT_INTEVAL)
From bd6790ace80460aa43f4b28ddd11f708bb6d52e1 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 10:24:12 +0800
Subject: [PATCH 11/62] Update
---
.github/workflows/build.yml | 10 +++++-----
build-script/build.alpine.release.sh | 15 ---------------
build-script/build.release.sh | 16 ++++++++++++++++
3 files changed, 21 insertions(+), 20 deletions(-)
delete mode 100644 build-script/build.alpine.release.sh
create mode 100644 build-script/build.release.sh
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 7c89cff..80f3a96 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:x86-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:x86-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
@@ -41,7 +41,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:amd64-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:amd64-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux64.tar.gz monitor
@@ -68,7 +68,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:armv7-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armv7-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_armv7.tar.gz monitor
@@ -94,7 +94,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:aarch64-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:aarch64-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_aarch64.tar.gz monitor
@@ -120,7 +120,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/alpine:armhf-latest-stable /bin/sh -c "apk add bash git python3 py3-pip && cd /root/workdir && chmod +x build-script/build.alpine.release.sh && bash build-script/build.alpine.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armhf-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_armhf.tar.gz monitor
diff --git a/build-script/build.alpine.release.sh b/build-script/build.alpine.release.sh
deleted file mode 100644
index 9f88fc2..0000000
--- a/build-script/build.alpine.release.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-set -xe
-
-apk add gcc python3-dev py3-psutil py3-wheel g++ build-base linux-headers zlib-dev cmake make autoconf automake libtool
-pip3 install -r requirements.txt
-
-pip3 install pyinstaller==5.13.2
-
-pyinstaller --onefile report.py
-
-cd dist
-chmod +rx report
-chmod +r ./*
-cd ..
-mv dist monitor
diff --git a/build-script/build.release.sh b/build-script/build.release.sh
new file mode 100644
index 0000000..2410b1a
--- /dev/null
+++ b/build-script/build.release.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+set -xe
+
+apt install -y gcc python3-dev python3-psutil python3-wheel g++ build-base linux-headers python3-setuptools
+
+pip3 install -r requirements.txt
+
+pip3 install pyinstaller
+
+pyinstaller --onefile report.py
+
+cd dist
+chmod +rx report
+chmod +r ./*
+cd ..
+mv dist monitor
From 7fc6edfeea543b427889fbe10ce64d91a07fa691 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 10:26:59 +0800
Subject: [PATCH 12/62] Update
---
.github/workflows/build.yml | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 80f3a96..b7219ec 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:x86-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:x86-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
@@ -41,7 +41,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:amd64-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:amd64-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux64.tar.gz monitor
@@ -68,7 +68,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armv7-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armv7-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_armv7.tar.gz monitor
@@ -94,7 +94,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:aarch64-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:aarch64-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_aarch64.tar.gz monitor
@@ -120,7 +120,7 @@ jobs:
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armhf-focal /bin/sh -c "apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armhf-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_armhf.tar.gz monitor
From 4e0bcafd489254e9ce5278d168d924e5a9d5670c Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 10:32:51 +0800
Subject: [PATCH 13/62] Update
---
.github/workflows/build.yml | 64 +++++++++++++++++------------------
build-script/build.release.sh | 2 +-
2 files changed, 33 insertions(+), 33 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index b7219ec..479b266 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:x86-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
@@ -60,17 +60,43 @@ jobs:
draft: true
tag_name: Alpha
- armv7_build:
- name: Linux armv7 Build
+ # armv7_build:
+ # name: Linux armv7 Build
+ # runs-on: ubuntu-latest
+ # steps:
+ # - uses: actions/checkout@v3
+ # - name: Build
+ # run: |
+ # docker run --rm --privileged multiarch/qemu-user-static:register --reset
+ # docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armv7-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ # - name: Package Release
+ # run: tar czf server_monitor_armv7.tar.gz monitor
+
+ # - name: Upload
+ # uses: actions/upload-artifact@v3
+ # with:
+ # path: |
+ # server_monitor_*.tar.gz
+
+ # - name: Draft Release
+ # uses: softprops/action-gh-release@v1
+ # if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
+ # with:
+ # files: server_monitor_armv7.tar.gz
+ # draft: true
+ # tag_name: Alpha
+
+ arm64_build:
+ name: Linux arm64 Build
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Build
run: |
docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:armv7-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:arm64-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
- run: tar czf server_monitor_armv7.tar.gz monitor
+ run: tar czf server_monitor_arm64.tar.gz monitor
- name: Upload
uses: actions/upload-artifact@v3
@@ -82,33 +108,7 @@ jobs:
uses: softprops/action-gh-release@v1
if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
with:
- files: server_monitor_armv7.tar.gz
- draft: true
- tag_name: Alpha
-
- aarch64_build:
- name: Linux aarch64 Build
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - name: Build
- run: |
- docker run --rm --privileged multiarch/qemu-user-static:register --reset
- docker run -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:aarch64-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- - name: Package Release
- run: tar czf server_monitor_aarch64.tar.gz monitor
-
- - name: Upload
- uses: actions/upload-artifact@v3
- with:
- path: |
- server_monitor_*.tar.gz
-
- - name: Draft Release
- uses: softprops/action-gh-release@v1
- if: ${{ github.event_name != 'pull_request' && startsWith(github.ref, 'refs/tags/') }}
- with:
- files: server_monitor_aarch64.tar.gz
+ files: server_monitor_arm64.tar.gz
draft: true
tag_name: Alpha
diff --git a/build-script/build.release.sh b/build-script/build.release.sh
index 2410b1a..35fda0f 100644
--- a/build-script/build.release.sh
+++ b/build-script/build.release.sh
@@ -1,7 +1,7 @@
#!/bin/bash
set -xe
-apt install -y gcc python3-dev python3-psutil python3-wheel g++ build-base linux-headers python3-setuptools
+apt install -y gcc python3-dev python3-psutil python3-wheel g++ python3-setuptools
pip3 install -r requirements.txt
From 47929048ffa5e3e73c27a407aca536372522321a Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:24:10 +0800
Subject: [PATCH 14/62] Update
---
.github/workflows/build.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 479b266..30f1164 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -253,9 +253,9 @@ jobs:
[
linux64_build,
linux32_build,
- armv7_build,
+# armv7_build,
armhf_build,
- aarch64_build,
+ arm64_build,
# macos_build,
# windows64_build,
# windows32_build,
From e6b18523d8c146009bca6fdb61ea6f453ad87bfc Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:28:14 +0800
Subject: [PATCH 15/62] Update
---
.github/workflows/build.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 30f1164..5666fb7 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-focal /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-bionic /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
From 27fdc89c27673ee92152716b70e555e418a6ac1d Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:33:48 +0800
Subject: [PATCH 16/62] Update
---
.github/workflows/build.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 5666fb7..955c4b0 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-bionic /bin/sh -c "apt update && apt install -y bash git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-bionic /bin/sh -c "apt update && apt install -y bash zlib-devel make cmake git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
From 25bc2ac297871e35115a8df511b8eca28508c80e Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:36:39 +0800
Subject: [PATCH 17/62] Update
---
.github/workflows/build.yml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 955c4b0..d31b98c 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@v3
- name: Build
- run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-bionic /bin/sh -c "apt update && apt install -y bash zlib-devel make cmake git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
+ run: docker run --rm -v $GITHUB_WORKSPACE:/root/workdir multiarch/ubuntu-core:i386-bionic /bin/sh -c "apt update && apt install -y bash zlib1g zlib1g-dev make cmake git python3 python3-pip && cd /root/workdir && chmod +x build-script/build.release.sh && bash build-script/build.release.sh"
- name: Package Release
run: tar czf server_monitor_linux32.tar.gz monitor
From 0102b2619c2cebea641be1731e6aeb9186694d23 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:38:12 +0800
Subject: [PATCH 18/62] Update
---
requirements.txt | 1 +
1 file changed, 1 insertion(+)
diff --git a/requirements.txt b/requirements.txt
index 6a30761..df55d8e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ distro
psutil
requests
redis
+dotenv
\ No newline at end of file
From b9599f4a6a1ef2eafc9d9b7f2b0a1b5098bea162 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:43:17 +0800
Subject: [PATCH 19/62] Update
---
build-script/build.release.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/build-script/build.release.sh b/build-script/build.release.sh
index 35fda0f..f5b3785 100644
--- a/build-script/build.release.sh
+++ b/build-script/build.release.sh
@@ -1,7 +1,7 @@
#!/bin/bash
set -xe
-apt install -y gcc python3-dev python3-psutil python3-wheel g++ python3-setuptools
+apt install -y gcc python3-dev python3-psutil python3-dotenv python3-wheel g++ python3-setuptools
pip3 install -r requirements.txt
From 039489fbbb5def44e10ba21fd332b669f90078e6 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:46:21 +0800
Subject: [PATCH 20/62] Update
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index df55d8e..75ad8dd 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,4 @@ distro
psutil
requests
redis
-dotenv
\ No newline at end of file
+#dotenv
\ No newline at end of file
From 3db5452a33a1fe8e87f7d97b2ede20af163ad2e5 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:47:03 +0800
Subject: [PATCH 21/62] Update
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index 75ad8dd..7cd096f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,4 @@ distro
psutil
requests
redis
-#dotenv
\ No newline at end of file
+python-dotenv
\ No newline at end of file
From 8ef1f33d8f34a385aef451816e5a42db884947c4 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 15:57:55 +0800
Subject: [PATCH 22/62] Update
---
build-script/build.release.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/build-script/build.release.sh b/build-script/build.release.sh
index f5b3785..a7d8210 100644
--- a/build-script/build.release.sh
+++ b/build-script/build.release.sh
@@ -13,4 +13,5 @@ cd dist
chmod +rx report
chmod +r ./*
cd ..
+mv .env dist/
mv dist monitor
From 9df652d1a923e6ac3d1f5bb48f6e93ffb09eee34 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 16:06:02 +0800
Subject: [PATCH 23/62] Update
---
report.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/report.py b/report.py
index 19c40bf..98f628b 100644
--- a/report.py
+++ b/report.py
@@ -11,9 +11,9 @@
import cpuinfo
import distro
import platform
-from dotenv import load_dotenv
+from dotenv import load_dotenv, find_dotenv
-load_dotenv()
+load_dotenv(find_dotenv())
HOST = os.getenv("HOST", "127.0.0.1")
PORT = os.getenv("PORT", "6379")
From fb3052c8a8980fa585bfcd7f135700212b6f4e24 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 16:11:06 +0800
Subject: [PATCH 24/62] Update
---
.env => .env.example | 0
.gitignore | 3 ++-
build-script/build.release.sh | 2 +-
3 files changed, 3 insertions(+), 2 deletions(-)
rename .env => .env.example (100%)
diff --git a/.env b/.env.example
similarity index 100%
rename from .env
rename to .env.example
diff --git a/.gitignore b/.gitignore
index 723ef36..3bf780b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
-.idea
\ No newline at end of file
+.idea
+.env
\ No newline at end of file
diff --git a/build-script/build.release.sh b/build-script/build.release.sh
index a7d8210..dfcacf5 100644
--- a/build-script/build.release.sh
+++ b/build-script/build.release.sh
@@ -13,5 +13,5 @@ cd dist
chmod +rx report
chmod +r ./*
cd ..
-mv .env dist/
+mv .env.example dist/
mv dist monitor
From 9c1be2f199af6048c1c8f210dc7b84279d07b66e Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 11 Dec 2023 16:52:10 +0800
Subject: [PATCH 25/62] Update
---
.gitignore | 5 ++++-
report.py | 7 ++++++-
2 files changed, 10 insertions(+), 2 deletions(-)
diff --git a/.gitignore b/.gitignore
index 3bf780b..1c4ebb6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
.idea
-.env
\ No newline at end of file
+.env
+.uuid
+dist/
+build/
\ No newline at end of file
diff --git a/report.py b/report.py
index 98f628b..7f5cfee 100644
--- a/report.py
+++ b/report.py
@@ -13,7 +13,12 @@
import platform
from dotenv import load_dotenv, find_dotenv
-load_dotenv(find_dotenv())
+# get .env location for pyinstaller
+extDataDir = os.getcwd()
+if getattr(sys, 'frozen', False):
+ extDataDir = sys._MEIPASS
+load_dotenv(dotenv_path=os.path.join(extDataDir, '.env'))
+
HOST = os.getenv("HOST", "127.0.0.1")
PORT = os.getenv("PORT", "6379")
From 76bb456afbb45801ba499c39e5d5e13b39b531ef Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Jan 2024 12:56:36 +0800
Subject: [PATCH 26/62] Add service install shell script for Linux
---
install_service_linux.sh | 25 +++++++++++++++++++++++++
1 file changed, 25 insertions(+)
create mode 100644 install_service_linux.sh
diff --git a/install_service_linux.sh b/install_service_linux.sh
new file mode 100644
index 0000000..2ea636a
--- /dev/null
+++ b/install_service_linux.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+mkdir -p /usr/local/monitor
+\cp -f report.py /usr/local/monitor
+\cp -f .env /usr/local/monitor
+
+cat > /lib/systemd/system/monitor.service << EOF
+[Unit]
+Description=monitor
+After=network.target
+
+[Service]
+User=root
+ExecStart=/usr/bin/python3 /usr/local/monitor/r.py
+Restart=always
+
+[Install]
+WantedBy=multi-user.target
+
+EOF
+
+systemctl daemon-reload
+systemctl start monitor
+systemctl enable monitor
+
From bf0256e4fe9f8cb9dc5d356287c52b60220f6c49 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Jan 2024 13:02:04 +0800
Subject: [PATCH 27/62] fix import
---
report.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/report.py b/report.py
index 7f5cfee..717d20a 100644
--- a/report.py
+++ b/report.py
@@ -1,3 +1,4 @@
+import sys
import json
import math
import re
From 66964c73a2b7ab336733b20479dc61a672601b53 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Jan 2024 13:05:37 +0800
Subject: [PATCH 28/62] fix
---
install_service_linux.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/install_service_linux.sh b/install_service_linux.sh
index 2ea636a..c80db28 100644
--- a/install_service_linux.sh
+++ b/install_service_linux.sh
@@ -11,7 +11,7 @@ After=network.target
[Service]
User=root
-ExecStart=/usr/bin/python3 /usr/local/monitor/r.py
+ExecStart=/usr/bin/python3 /usr/local/monitor/report.py
Restart=always
[Install]
From cb83cdb023f5de7261e171ff8e3464ea4a663026 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Jan 2024 13:12:34 +0800
Subject: [PATCH 29/62] fix env
---
install_service_linux.sh | 1 +
1 file changed, 1 insertion(+)
diff --git a/install_service_linux.sh b/install_service_linux.sh
index c80db28..cb72b6a 100644
--- a/install_service_linux.sh
+++ b/install_service_linux.sh
@@ -12,6 +12,7 @@ After=network.target
[Service]
User=root
ExecStart=/usr/bin/python3 /usr/local/monitor/report.py
+WorkingDirectory=/usr/local/monitor/
Restart=always
[Install]
From a8214bd75b8a3bda89271e46f14161af888a8aa9 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 19 Jan 2024 16:05:52 +0800
Subject: [PATCH 30/62] Update: 1. add battery support 2. improve install shell
---
install_service_linux.sh | 49 ++++++++++++++++++++++++++++++++--------
report.py | 32 +++++++++++++++++++-------
2 files changed, 64 insertions(+), 17 deletions(-)
diff --git a/install_service_linux.sh b/install_service_linux.sh
index cb72b6a..e82dd30 100644
--- a/install_service_linux.sh
+++ b/install_service_linux.sh
@@ -1,12 +1,34 @@
#!/bin/bash
-mkdir -p /usr/local/monitor
-\cp -f report.py /usr/local/monitor
-\cp -f .env /usr/local/monitor
+case `uname` in
+ Linux )
+ LINUX=1
+ which apk && {
+ echo "Alpine"
+ mkdir -p /usr/local/monitor
+ \cp -f .env /usr/local/monitor
+ \cp -f report.py /usr/local/monitor
-cat > /lib/systemd/system/monitor.service << EOF
+ cat > /etc/local.d/monitor.start << EOF
+cd /usr/local/monitor/
+nohup /usr/bin/python3 /usr/local/monitor/report.py &
+
+EOF
+
+ chmod +x /etc/local.d/monitor.start
+ rc-update add local
+ rc-service local start
+ return
+ }
+ (which yum || which apt-get) && {
+ echo "CentOS or Debian"
+ mkdir -p /usr/local/monitor
+ \cp -f report.py /usr/local/monitor
+ \cp -f .env /usr/local/monitor
+
+ cat > /lib/systemd/system/monitor.service << EOF
[Unit]
-Description=monitor
+Description=server monitor
After=network.target
[Service]
@@ -19,8 +41,17 @@ Restart=always
WantedBy=multi-user.target
EOF
-
-systemctl daemon-reload
-systemctl start monitor
-systemctl enable monitor
+ systemctl daemon-reload
+ systemctl start monitor
+ systemctl enable monitor
+ return
+ }
+ ;;
+ Darwin )
+ DARWIN=1
+ ;;
+ * )
+ # Handle AmigaOS, CPM, and modified cable modems.
+ ;;
+esac
diff --git a/report.py b/report.py
index 717d20a..a245ad8 100644
--- a/report.py
+++ b/report.py
@@ -12,6 +12,7 @@
import cpuinfo
import distro
import platform
+from datetime import timedelta
from dotenv import load_dotenv, find_dotenv
# get .env location for pyinstaller
@@ -66,6 +67,8 @@ def get_process_num():
def get_cpu_name():
return CPU_INFO['brand_raw']
+def get_load_average():
+ return "%.2f, %.2f, %.2f" % (psutil.getloadavg())
def get_cpu_core():
# core modelname mhz'''
@@ -76,12 +79,21 @@ def get_temp():
# thermal temp
result = {}
try:
- for sensor_type, sensors in psutil.sensors_temperatures().items():
- for sensor in sensors:
- result[sensor_type+":"+sensor.label] = sensor.current
- except: pass
+ for sensor_type, sensors in psutil.sensors_temperatures().items():
+ for sensor in sensors:
+ result[sensor_type+":"+sensor.label] = sensor.current
+ except:
+ pass
return result
+def get_battery():
+ # battery temp
+ result = {}
+ try:
+ result["percent"] = psutil.sensors_battery().percent
+ except:
+ pass
+ return result
def get_mem_info():
info = {'Mem': {
@@ -180,8 +192,9 @@ def get_country():
try:
resp = requests.get(url=IP_API, timeout=5)
if resp.status_code == 200:
- j = resp.json()
- return (j["country"], j["countryCode"])
+ j = resp.json()
+ COUNTRY = (j["country"], j["countryCode"])
+ return COUNTRY
except:
i = i - 1
@@ -196,7 +209,8 @@ def get_connections():
def get_uptime():
t = time.time() - psutil.boot_time()
- return "%02d:%02d:%02d" % (int(t / 3600), int(t / 60 % 60), int(t % 60))
+ delta = timedelta(seconds=t)
+ return str(delta)
def get_load():
@@ -213,7 +227,8 @@ def get_aggregate_stat():
'Memory': get_mem_info(),
'Load': get_load(),
'Network': get_network(),
- 'Thermal': get_temp()
+ 'Thermal': get_temp(),
+ 'Battery': get_battery(),
}
logging.debug(info)
return info
@@ -234,6 +249,7 @@ def report_once():
'Uptime': get_uptime(),
'Connection': get_connections(),
'Process': get_process_num(),
+ 'Load Average': get_load_average(),
"Update Time": TIME,
"Country": COUNTRY[0],
"Country Code": COUNTRY[1],
From 3e9c85d247f096cff09a00102ed24a88d1d30ce3 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 22 Jan 2024 21:21:19 +0800
Subject: [PATCH 31/62] Update README.md
---
README.md | 54 +++++++++++++++++++++++++++++++++++++++++++++++++++++-
1 file changed, 53 insertions(+), 1 deletion(-)
diff --git a/README.md b/README.md
index f951220..2037bfa 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,54 @@
-# server-monitor-script
server-monitor-script
+=======
+
+
+
+
+
+[Server Monitor](https://github.com/LittleJake/server-monitor/)的探针节点脚本,支持多种系统。
+
+数据上报至Redis服务器,Linux、Windows已通过测试。
+
+可注册使用免费Redis服务器:[redislab](https://redis.com/)、[aiven.io](https://console.aiven.io/)。
+
+### 安装
+
+#### Linux
+
+```bash
+git clone https://github.com/LittleJake/server-monitor-script/
+
+pip3 install -r requirements.txt
+
+# 编辑.env.example文件保存为.env文件
+cp .env.example .env
+vim .env
+
+# 安装服务CentOS/Debian/Ubuntu
+bash install_service_linux.sh
+
+```
+
+#### Windows
+
+```cmd
+git clone https://github.com/LittleJake/server-monitor-script/
+
+pip3 install -r requirements.txt
+# 编辑.env.example文件保存为.env文件
+
+# 运行服务
+python3 report.py
+
+```
+
+
+### Sponsors
+
+Thanks for the amazing VM server provided by [DartNode](https://dartnode.com?via=1).
+
+
+
+Thanks for the open source project license provided by [JetBrains](https://www.jetbrains.com/).
+
+
From 32184fe05f23d50e0cbafb6db33c6443350aa990 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 22 Jan 2024 21:24:04 +0800
Subject: [PATCH 32/62] Update README.md
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 2037bfa..91d423f 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@ server-monitor-script
-
+
[Server Monitor](https://github.com/LittleJake/server-monitor/)的探针节点脚本,支持多种系统。
From 90bd43940ce22d3adfdb377da273a1fd92b07f6b Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 22 Jan 2024 21:25:48 +0800
Subject: [PATCH 33/62] Update README.md
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 91d423f..76a4a95 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
server-monitor-script
=======
-
+
From ffba9a0e90a01e1fef8adb305560735cde86d07d Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 23 Jan 2024 23:01:19 +0800
Subject: [PATCH 34/62] Update report.py
---
report.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/report.py b/report.py
index a245ad8..0b6a63f 100644
--- a/report.py
+++ b/report.py
@@ -68,7 +68,10 @@ def get_cpu_name():
return CPU_INFO['brand_raw']
def get_load_average():
- return "%.2f, %.2f, %.2f" % (psutil.getloadavg())
+ try:
+ avg = psutil.getloadavg()
+ except: return "Python version not support"
+ return "%.2f, %.2f, %.2f" % ()
def get_cpu_core():
# core modelname mhz'''
From ed2e83bf1a98bec37e54588ab6e5d0f3694e0cce Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 23 Jan 2024 15:28:33 +0000
Subject: [PATCH 35/62] Update SSL & fix bug
---
.env.example | 5 +++--
report.py | 24 +++++++++++++++++++-----
2 files changed, 22 insertions(+), 7 deletions(-)
diff --git a/.env.example b/.env.example
index 9395b57..f8d2054 100644
--- a/.env.example
+++ b/.env.example
@@ -1,9 +1,10 @@
HOST=127.0.0.1
PORT=6379
PASSWORD=""
+SSL=False
-IPV4_API="http://v4.ipv6-test.com/api/myip.php"
-IPV6_API="http://v6.ipv6-test.com/api/myip.php"
+IPV4_API="https://v4.ipv6-test.com/api/myip.php"
+IPV6_API="https://v6.ipv6-test.com/api/myip.php"
IP_API="http://ip-api.com/json?fields=country,countryCode"
REPORT_TIME=60
diff --git a/report.py b/report.py
index 0b6a63f..ae5ceff 100644
--- a/report.py
+++ b/report.py
@@ -25,6 +25,7 @@
HOST = os.getenv("HOST", "127.0.0.1")
PORT = os.getenv("PORT", "6379")
PASSWORD = os.getenv("PASSWORD", "")
+SSL = os.getenv("SSL", 'False').lower() in ('true', '1', 't')
IPV4_API = os.getenv('IPV4_API', "http://v4.ipv6-test.com/api/myip.php")
IPV6_API = os.getenv('IPV6_API', "http://v6.ipv6-test.com/api/myip.php")
IP_API = os.getenv('IP_API', "http://ip-api.com/json?fields=country,countryCode")
@@ -39,7 +40,7 @@
IPV4 = None
IPV6 = None
COUNTRY = None
-conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, retry_on_timeout=10)
+conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=10)
TIME = math.floor(time.time())
NET_FORMER = psutil.net_io_counters()
CPU_INFO = cpuinfo.get_cpu_info()
@@ -71,7 +72,7 @@ def get_load_average():
try:
avg = psutil.getloadavg()
except: return "Python version not support"
- return "%.2f, %.2f, %.2f" % ()
+ return "%.2f, %.2f, %.2f" % avg
def get_cpu_core():
# core modelname mhz'''
@@ -244,6 +245,19 @@ def report_once():
IP = get_ipv4()
TIME = time.time()
COUNTRY = get_country()
+ logging.debug("{}x {} @{}".format(CPU_INFO['count'], CPU_INFO.get('brand_raw', CPU_INFO.get('arch', 'Unknown')), CPU_INFO['hz_advertised_friendly']))
+ logging.debug(get_sys_version())
+ logging.debug(re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()))
+ logging.debug(re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()))
+ logging.debug(get_uptime())
+ logging.debug(get_connections())
+ logging.debug(get_process_num())
+ logging.debug(get_load_average())
+ logging.debug(TIME)
+ logging.debug(COUNTRY[0])
+ logging.debug(COUNTRY[1])
+ logging.debug("D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824))
+
info = {
"CPU": "{}x {} @{}".format(CPU_INFO['count'], CPU_INFO.get('brand_raw', CPU_INFO.get('arch', 'Unknown')), CPU_INFO['hz_advertised_friendly']),
"System Version": get_sys_version(),
@@ -258,11 +272,11 @@ def report_once():
"Country Code": COUNTRY[1],
"Throughput": "D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
}
- logging.debug(info)
+
with conn.pipeline(transaction=False) as pipeline:
- pipeline.hmset(name="system_monitor:hashes", mapping={UUID: IP})
- pipeline.hmset(name="system_monitor:info:" + UUID, mapping=info)
+ pipeline.hset(name="system_monitor:hashes", mapping={UUID: IP})
+ pipeline.hset(name="system_monitor:info:" + UUID, mapping=info)
pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
pipeline.expire("system_monitor:nodes", DATA_TIMEOUT)
From d08392d3b2e4ab51832d63c789ef5db59c690f33 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 23 Jan 2024 15:33:18 +0000
Subject: [PATCH 36/62] fix shell
---
install_service_linux.sh | 2 --
1 file changed, 2 deletions(-)
diff --git a/install_service_linux.sh b/install_service_linux.sh
index e82dd30..bbb6fb6 100644
--- a/install_service_linux.sh
+++ b/install_service_linux.sh
@@ -18,7 +18,6 @@ EOF
chmod +x /etc/local.d/monitor.start
rc-update add local
rc-service local start
- return
}
(which yum || which apt-get) && {
echo "CentOS or Debian"
@@ -44,7 +43,6 @@ EOF
systemctl daemon-reload
systemctl start monitor
systemctl enable monitor
- return
}
;;
Darwin )
From 9b7bf9d72dd444cf6a2406749590f871ad8c285d Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 23 Jan 2024 15:49:01 +0000
Subject: [PATCH 37/62] amend country
---
report.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/report.py b/report.py
index ae5ceff..fc15aeb 100644
--- a/report.py
+++ b/report.py
@@ -269,7 +269,7 @@ def report_once():
'Load Average': get_load_average(),
"Update Time": TIME,
"Country": COUNTRY[0],
- "Country Code": COUNTRY[1],
+ "Country Code": "CN" if COUNTRY[1] in ("TW", "HK", "MO") else COUNTRY[1],
"Throughput": "D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
}
From e50946adc0db95eff7903c2957fe9c5c377010e7 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 23 Jan 2024 15:52:12 +0000
Subject: [PATCH 38/62] amend uptime
---
report.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/report.py b/report.py
index fc15aeb..31a9f54 100644
--- a/report.py
+++ b/report.py
@@ -212,7 +212,7 @@ def get_connections():
def get_uptime():
- t = time.time() - psutil.boot_time()
+ t = int(time.time() - psutil.boot_time())
delta = timedelta(seconds=t)
return str(delta)
From 362e093a01f8542016776697710d6e0339ccf5ba Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 11:03:18 +0800
Subject: [PATCH 39/62] Fixing termux (root mode)
---
report.py | 9 ++++++---
1 file changed, 6 insertions(+), 3 deletions(-)
diff --git a/report.py b/report.py
index 31a9f54..2b2e9a8 100644
--- a/report.py
+++ b/report.py
@@ -71,7 +71,7 @@ def get_cpu_name():
def get_load_average():
try:
avg = psutil.getloadavg()
- except: return "Python version not support"
+ except: return "Not support"
return "%.2f, %.2f, %.2f" % avg
def get_cpu_core():
@@ -216,6 +216,9 @@ def get_uptime():
delta = timedelta(seconds=t)
return str(delta)
+def get_cpu_freq():
+ return CPU_INFO.get('hz_advertised_friendly', "%.2f Ghz" % (psutil.cpu_freq().max / 1000))
+
def get_load():
return dict(psutil.cpu_times_percent()._asdict())
@@ -245,7 +248,7 @@ def report_once():
IP = get_ipv4()
TIME = time.time()
COUNTRY = get_country()
- logging.debug("{}x {} @{}".format(CPU_INFO['count'], CPU_INFO.get('brand_raw', CPU_INFO.get('arch', 'Unknown')), CPU_INFO['hz_advertised_friendly']))
+ logging.debug("{}x {} @{}".format(get_cpu_core(), CPU_INFO.get('brand_raw', CPU_INFO.get('arch_string_raw', 'Unknown')), get_cpu_freq()))
logging.debug(get_sys_version())
logging.debug(re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()))
logging.debug(re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()))
@@ -259,7 +262,7 @@ def report_once():
logging.debug("D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824))
info = {
- "CPU": "{}x {} @{}".format(CPU_INFO['count'], CPU_INFO.get('brand_raw', CPU_INFO.get('arch', 'Unknown')), CPU_INFO['hz_advertised_friendly']),
+ "CPU": "{}x {} @{}".format(get_cpu_core(), CPU_INFO.get('brand_raw', CPU_INFO.get('arch_string_raw', 'Unknown')), get_cpu_freq()),
"System Version": get_sys_version(),
"IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
"IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
From 7effcd243c6faab883644c98e3f2ea51e19f0c53 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 11:38:06 +0800
Subject: [PATCH 40/62] adding opts exclude
---
.env.example | 3 ++-
report.py | 3 ++-
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/.env.example b/.env.example
index f8d2054..72d5d6f 100644
--- a/.env.example
+++ b/.env.example
@@ -12,4 +12,5 @@ TIMEOUT=259200
RETENTION_TIME=86400
DISK_EXCLUDE=/run,/sys,/boot,/dev,/proc,/gdrive,/var/lib
-DISK_FS_EXCLUDE=tmpfs,overlay
\ No newline at end of file
+DISK_FS_EXCLUDE=tmpfs,overlay
+DISK_OPTS_EXCLUDE=ro
\ No newline at end of file
diff --git a/report.py b/report.py
index 2b2e9a8..423c71c 100644
--- a/report.py
+++ b/report.py
@@ -34,6 +34,7 @@
RETENTION_TIME = int(os.getenv('RETENTION_TIME', '86400'))
DISK_EXCLUDE = os.getenv('DISK_EXCLUDE','/run,/sys,/boot,/dev,/proc,/var/lib').split(",")
DISK_FS_EXCLUDE = os.getenv('DISK_FS_EXCLUDE', 'tmpfs,overlay').split(",")
+DISK_OPTS_EXCLUDE = os.getenv('DISK_OPTS_EXCLUDE', 'ro').split(",")
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
UUID = str(uuid.uuid4()).replace("-", "")
@@ -128,7 +129,7 @@ def get_disk_partitions():
for part in parts:
result.append(part)
for i in DISK_EXCLUDE:
- if part.mountpoint.find(i) != -1 or part.fstype in DISK_FS_EXCLUDE:
+ if part.mountpoint.find(i) != -1 or part.fstype in DISK_FS_EXCLUDE or len(set(part.opts.split(",")) & set(DISK_OPTS_EXCLUDE)) > 0:
result.remove(part)
break
From 8a934a3aa813f0cd5f0e7d0ef719d8ef5ff9eea6 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 13:07:14 +0800
Subject: [PATCH 41/62] Update
---
report.py | 127 +++++++++++++++++++++++++++---------------------------
1 file changed, 64 insertions(+), 63 deletions(-)
diff --git a/report.py b/report.py
index 423c71c..637005e 100644
--- a/report.py
+++ b/report.py
@@ -21,7 +21,7 @@
extDataDir = sys._MEIPASS
load_dotenv(dotenv_path=os.path.join(extDataDir, '.env'))
-
+# get .env
HOST = os.getenv("HOST", "127.0.0.1")
PORT = os.getenv("PORT", "6379")
PASSWORD = os.getenv("PASSWORD", "")
@@ -36,8 +36,16 @@
DISK_FS_EXCLUDE = os.getenv('DISK_FS_EXCLUDE', 'tmpfs,overlay').split(",")
DISK_OPTS_EXCLUDE = os.getenv('DISK_OPTS_EXCLUDE', 'ro').split(",")
+
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
UUID = str(uuid.uuid4()).replace("-", "")
+if os.path.isfile('.uuid'):
+ with open('.uuid', 'r') as fp:
+ UUID = fp.read().strip()
+else:
+ with open('.uuid', 'w') as fp:
+ fp.write(UUID)
+
IPV4 = None
IPV6 = None
COUNTRY = None
@@ -51,11 +59,11 @@ def get_network():
net_temp = psutil.net_io_counters()
network = {'RX': {
- 'bytes': net_temp.bytes_recv - NET_FORMER.bytes_recv,
- 'packets': net_temp.packets_recv - NET_FORMER.packets_recv,
+ 'bytes': (net_temp.bytes_recv - NET_FORMER.bytes_recv) if (net_temp.bytes_recv - NET_FORMER.bytes_recv) > 0 else 0,
+ 'packets': (net_temp.packets_recv - NET_FORMER.packets_recv) if (net_temp.packets_recv - NET_FORMER.packets_recv) > 0 else 0,
}, 'TX': {
- 'bytes': net_temp.bytes_sent - NET_FORMER.bytes_sent,
- 'packets': net_temp.packets_sent - NET_FORMER.packets_sent,
+ 'bytes': (net_temp.bytes_sent - NET_FORMER.bytes_sent) if (net_temp.bytes_sent - NET_FORMER.bytes_sent) > 0 else 0,
+ 'packets': (net_temp.packets_sent - NET_FORMER.packets_sent) if (net_temp.packets_sent - NET_FORMER.packets_sent) > 0 else 0,
}}
NET_FORMER = net_temp
@@ -67,7 +75,7 @@ def get_process_num():
def get_cpu_name():
- return CPU_INFO['brand_raw']
+ return CPU_INFO.get('brand_raw', CPU_INFO.get('arch_string_raw', 'Unknown'))
def get_load_average():
try:
@@ -76,7 +84,6 @@ def get_load_average():
return "%.2f, %.2f, %.2f" % avg
def get_cpu_core():
- # core modelname mhz'''
return str(psutil.cpu_count())
@@ -127,11 +134,13 @@ def get_disk_partitions():
parts = psutil.disk_partitions(True)
result = []
for part in parts:
- result.append(part)
- for i in DISK_EXCLUDE:
- if part.mountpoint.find(i) != -1 or part.fstype in DISK_FS_EXCLUDE or len(set(part.opts.split(",")) & set(DISK_OPTS_EXCLUDE)) > 0:
- result.remove(part)
- break
+ try:
+ result.append(part)
+ for i in DISK_EXCLUDE:
+ if part.mountpoint.find(i) != -1 or part.fstype in DISK_FS_EXCLUDE or len(set(part.opts.split(",")) & set(DISK_OPTS_EXCLUDE)) > 0:
+ result.remove(part)
+ break
+ except: result.remove(part)
return result
@@ -140,31 +149,40 @@ def get_disk_info():
disks = {}
for partition in get_disk_partitions():
- disk = psutil.disk_usage(partition.mountpoint)
- disks[partition.mountpoint] = {
- 'total': '%.2f' % (disk.total*1.0/1048576),
- 'used': '%.2f' % (disk.used*1.0/1048576),
- 'free': '%.2f' % (disk.free*1.0/1048576),
- 'percent': disk.percent
- }
+ try:
+ disk = psutil.disk_usage(partition.mountpoint)
+ disks[partition.mountpoint] = {
+ 'total': '%.2f' % (disk.total*1.0/1048576),
+ 'used': '%.2f' % (disk.used*1.0/1048576),
+ 'free': '%.2f' % (disk.free*1.0/1048576),
+ 'percent': disk.percent
+ }
+ except Exception as e: logging.error(e)
return disks
+def get_request(url=''):
+ i = 5
+ while i > 0:
+ try:
+ resp = requests.get(url=url, timeout=5)
+ if resp.status_code == 200:
+ return resp
+ except:
+ i = i - 1
+
+ return None
+
def get_ipv4():
- # interface ipv4'''
+ # interface ipv4
global IPV4
if IPV4 is None:
- i = 5
- while i > 0:
- try:
- resp = requests.get(url=IPV4_API, timeout=5)
- if resp.status_code == 200:
- IPV4 = resp.text
- return IPV4
- except:
- i = i - 1
-
+ resp = get_request(IPV4_API)
+ if resp is not None:
+ IPV4 = resp.text
+ return IPV4
+
return "None"
else:
return IPV4
@@ -174,36 +192,26 @@ def get_ipv6():
# interface ipv6
global IPV6
if IPV6 is None:
- i = 5
- while i > 0:
- try:
- resp = requests.get(url=IPV6_API, timeout=5)
- if resp.status_code == 200:
- return resp.text
- except:
- i = i - 1
-
+ resp = get_request(IPV6_API)
+ if resp is not None:
+ IPV6 = resp.text
+ return IPV6
+
return "None"
else:
return IPV6
def get_country():
- # interface ipv6
global COUNTRY
if COUNTRY is None:
- i = 5
- while i > 0:
- try:
- resp = requests.get(url=IP_API, timeout=5)
- if resp.status_code == 200:
- j = resp.json()
- COUNTRY = (j["country"], j["countryCode"])
- return COUNTRY
- except:
- i = i - 1
-
- return ("None", "None")
+ resp = get_request(IP_API)
+ if resp is not None:
+ j = resp.json()
+ COUNTRY = (j["country"], j["countryCode"])
+ return COUNTRY
+
+ return ("Unknown", "Unknown")
else:
return COUNTRY
@@ -249,7 +257,7 @@ def report_once():
IP = get_ipv4()
TIME = time.time()
COUNTRY = get_country()
- logging.debug("{}x {} @{}".format(get_cpu_core(), CPU_INFO.get('brand_raw', CPU_INFO.get('arch_string_raw', 'Unknown')), get_cpu_freq()))
+ logging.debug("{}x {} @{}".format(get_cpu_core(), get_cpu_name(), get_cpu_freq()))
logging.debug(get_sys_version())
logging.debug(re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()))
logging.debug(re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()))
@@ -263,7 +271,7 @@ def report_once():
logging.debug("D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824))
info = {
- "CPU": "{}x {} @{}".format(get_cpu_core(), CPU_INFO.get('brand_raw', CPU_INFO.get('arch_string_raw', 'Unknown')), get_cpu_freq()),
+ "CPU": "{}x {} @{}".format(get_cpu_core(), get_cpu_name(), get_cpu_freq()),
"System Version": get_sys_version(),
"IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
"IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
@@ -274,7 +282,7 @@ def report_once():
"Update Time": TIME,
"Country": COUNTRY[0],
"Country Code": "CN" if COUNTRY[1] in ("TW", "HK", "MO") else COUNTRY[1],
- "Throughput": "D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
+ "Throughput": "↑%.2f GB / ↓%.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
}
@@ -282,6 +290,7 @@ def report_once():
pipeline.hset(name="system_monitor:hashes", mapping={UUID: IP})
pipeline.hset(name="system_monitor:info:" + UUID, mapping=info)
pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
+
pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
pipeline.expire("system_monitor:nodes", DATA_TIMEOUT)
pipeline.expire("system_monitor:hashes", DATA_TIMEOUT)
@@ -292,14 +301,6 @@ def report_once():
logging.info("Finish Reporting!")
-if os.path.isfile('.uuid'):
- with open('.uuid', 'r') as fp:
- UUID = fp.read().strip()
-else:
- with open('.uuid', 'w') as fp:
- fp.write(UUID)
-
-
while True:
try:
report_once()
From eeed71832b0f31e99e591143351b27d9d67a6d9a Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 13:08:04 +0800
Subject: [PATCH 42/62] Update action
---
.github/workflows/build.yml | 3 ---
1 file changed, 3 deletions(-)
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index d31b98c..8a1bf01 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -1,9 +1,6 @@
name: GitHub CI
on:
- push:
- branches: [master]
workflow_dispatch:
- pull_request:
concurrency:
group: ${{ github.ref }}-${{ github.workflow }}
From 8d9add1be49f2266e743b9321737623ae351beef Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 22:46:08 +0800
Subject: [PATCH 43/62] Update report.py
---
report.py | 7 ++-----
1 file changed, 2 insertions(+), 5 deletions(-)
diff --git a/report.py b/report.py
index 637005e..3470d1d 100644
--- a/report.py
+++ b/report.py
@@ -225,9 +225,6 @@ def get_uptime():
delta = timedelta(seconds=t)
return str(delta)
-def get_cpu_freq():
- return CPU_INFO.get('hz_advertised_friendly', "%.2f Ghz" % (psutil.cpu_freq().max / 1000))
-
def get_load():
return dict(psutil.cpu_times_percent()._asdict())
@@ -257,7 +254,7 @@ def report_once():
IP = get_ipv4()
TIME = time.time()
COUNTRY = get_country()
- logging.debug("{}x {} @{}".format(get_cpu_core(), get_cpu_name(), get_cpu_freq()))
+ logging.debug("{}x {}".format(get_cpu_core(), get_cpu_name()))
logging.debug(get_sys_version())
logging.debug(re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()))
logging.debug(re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()))
@@ -271,7 +268,7 @@ def report_once():
logging.debug("D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824))
info = {
- "CPU": "{}x {} @{}".format(get_cpu_core(), get_cpu_name(), get_cpu_freq()),
+ "CPU": "{}x {}".format(get_cpu_core(), get_cpu_name()),
"System Version": get_sys_version(),
"IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
"IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
From ee1c77dfdbc95aabfcc9c41d0e56d1ca78bfdb8e Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 24 Jan 2024 23:02:11 +0800
Subject: [PATCH 44/62] =?UTF-8?q?fix=20=E2=86=91=E2=86=93?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
report.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/report.py b/report.py
index 3470d1d..00e5a4b 100644
--- a/report.py
+++ b/report.py
@@ -279,7 +279,7 @@ def report_once():
"Update Time": TIME,
"Country": COUNTRY[0],
"Country Code": "CN" if COUNTRY[1] in ("TW", "HK", "MO") else COUNTRY[1],
- "Throughput": "↑%.2f GB / ↓%.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
+ "Throughput": "↓%.2f GB / ↑%.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
}
From 48e8f75f66280a9c7f22610355d4891d9187ea0c Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Sun, 4 Feb 2024 22:05:11 +0800
Subject: [PATCH 45/62] Update report.py
Fix environment variable name: rename misspelled REPORT_INTEVAL to REPORT_TIME.
---
report.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/report.py b/report.py
index 00e5a4b..90c7585 100644
--- a/report.py
+++ b/report.py
@@ -29,7 +29,7 @@
IPV4_API = os.getenv('IPV4_API', "http://v4.ipv6-test.com/api/myip.php")
IPV6_API = os.getenv('IPV6_API', "http://v6.ipv6-test.com/api/myip.php")
IP_API = os.getenv('IP_API', "http://ip-api.com/json?fields=country,countryCode")
-REPORT_INTEVAL = int(os.getenv('REPORT_INTEVAL', '60'))
+REPORT_TIME = int(os.getenv('REPORT_TIME', '60'))
DATA_TIMEOUT = int(os.getenv('DATA_TIMEOUT', '259200'))
RETENTION_TIME = int(os.getenv('RETENTION_TIME', '86400'))
DISK_EXCLUDE = os.getenv('DISK_EXCLUDE','/run,/sys,/boot,/dev,/proc,/var/lib').split(",")
@@ -303,4 +303,4 @@ def report_once():
report_once()
except Exception as e:
logging.error(e)
- time.sleep(REPORT_INTEVAL)
+ time.sleep(REPORT_TIME)
From 97bf6480ae6e4e91db39c7a86d254241daf1ff78 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 7 Feb 2024 22:08:21 +0800
Subject: [PATCH 46/62] implement http api.
---
.env.example | 5 +++++
report.py | 50 +++++++++++++++++++++++++++++++++++++-------------
2 files changed, 42 insertions(+), 13 deletions(-)
diff --git a/.env.example b/.env.example
index 72d5d6f..387d6cc 100644
--- a/.env.example
+++ b/.env.example
@@ -1,8 +1,13 @@
+REPORT_MODE=redis #http,redis
+
HOST=127.0.0.1
PORT=6379
PASSWORD=""
SSL=False
+SERVER_TOKEN=""
+SERVER_URL="http://127.0.0.1"
+
IPV4_API="https://v4.ipv6-test.com/api/myip.php"
IPV6_API="https://v6.ipv6-test.com/api/myip.php"
IP_API="http://ip-api.com/json?fields=country,countryCode"
diff --git a/report.py b/report.py
index 90c7585..5b608e1 100644
--- a/report.py
+++ b/report.py
@@ -35,6 +35,9 @@
DISK_EXCLUDE = os.getenv('DISK_EXCLUDE','/run,/sys,/boot,/dev,/proc,/var/lib').split(",")
DISK_FS_EXCLUDE = os.getenv('DISK_FS_EXCLUDE', 'tmpfs,overlay').split(",")
DISK_OPTS_EXCLUDE = os.getenv('DISK_OPTS_EXCLUDE', 'ro').split(",")
+SERVER_URL = os.getenv('SERVER_URL', "")
+REPORT_MODE = os.getenv('REPORT_MODE', "redis").lower()
+SERVER_TOKEN = os.getenv('SERVER_TOKEN', "")
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
@@ -46,14 +49,24 @@
with open('.uuid', 'w') as fp:
fp.write(UUID)
+print("Your UUID is: %s" % UUID)
+SERVER_URL_INFO = "%s/api/report/info/%s" % (SERVER_URL, UUID)
+SERVER_URL_COLLECTION = "%s/api/report/collection/%s" % (SERVER_URL, UUID)
+SERVER_URL_HASH = "%s/api/report/hash/%s" % (SERVER_URL, UUID)
+
+if REPORT_MODE == 'http' and SERVER_TOKEN == "":
+ print("Please generate server token using `php think token add --uuid ` on your central server.")
+ exit(-1)
+
IPV4 = None
IPV6 = None
COUNTRY = None
-conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=10)
TIME = math.floor(time.time())
NET_FORMER = psutil.net_io_counters()
CPU_INFO = cpuinfo.get_cpu_info()
+conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=10)
+
def get_network():
global NET_FORMER
net_temp = psutil.net_io_counters()
@@ -282,18 +295,28 @@ def report_once():
"Throughput": "↓%.2f GB / ↑%.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
}
-
- with conn.pipeline(transaction=False) as pipeline:
- pipeline.hset(name="system_monitor:hashes", mapping={UUID: IP})
- pipeline.hset(name="system_monitor:info:" + UUID, mapping=info)
- pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
-
- pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
- pipeline.expire("system_monitor:nodes", DATA_TIMEOUT)
- pipeline.expire("system_monitor:hashes", DATA_TIMEOUT)
- pipeline.expire("system_monitor:info:" + UUID, DATA_TIMEOUT)
- pipeline.expire("system_monitor:collection:" + UUID, DATA_TIMEOUT)
- pipeline.execute()
+ if REPORT_MODE == 'redis':
+ with conn.pipeline(transaction=False) as pipeline:
+ pipeline.hset(name="system_monitor:hashes", mapping={UUID: IP})
+ pipeline.hset(name="system_monitor:info:" + UUID, mapping=info)
+ pipeline.zadd("system_monitor:collection:" + UUID, {get_aggregate_stat_json(): TIME})
+
+ pipeline.zremrangebyscore("system_monitor:collection:" + UUID, 0, TIME - RETENTION_TIME)
+ pipeline.expire("system_monitor:hashes", DATA_TIMEOUT)
+ pipeline.expire("system_monitor:info:" + UUID, DATA_TIMEOUT)
+ pipeline.expire("system_monitor:collection:" + UUID, DATA_TIMEOUT)
+ pipeline.execute()
+
+ elif REPORT_MODE == 'http':
+ try:
+ req = requests.post(url=SERVER_URL_HASH, data={'ip': IPV4}, headers={'authorization': SERVER_TOKEN})
+ if req.status_code != 200: raise Exception(req)
+ req = requests.post(url=SERVER_URL_INFO, json=info, headers={'authorization': SERVER_TOKEN})
+ if req.status_code != 200: raise Exception(req)
+ req = requests.post(url=SERVER_URL_COLLECTION, json=get_aggregate_stat(), headers={'authorization': SERVER_TOKEN})
+ if req.status_code != 200: raise Exception(req)
+ except Exception as e:
+ raise Exception("[HTTP%d]: %s, %s" % (e.args[0].status_code, e.args[0].text, e.args[0].url))
logging.info("Finish Reporting!")
@@ -303,4 +326,5 @@ def report_once():
report_once()
except Exception as e:
logging.error(e)
+ logging.error("ERROR OCCUR.")
time.sleep(REPORT_TIME)
From 7a4ffc3c705337ebc0e444c7f2066f1463176b99 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 7 Feb 2024 22:35:01 +0800
Subject: [PATCH 47/62] Update exception
---
report.py | 6 ++----
1 file changed, 2 insertions(+), 4 deletions(-)
diff --git a/report.py b/report.py
index 5b608e1..e80575b 100644
--- a/report.py
+++ b/report.py
@@ -54,10 +54,6 @@
SERVER_URL_COLLECTION = "%s/api/report/collection/%s" % (SERVER_URL, UUID)
SERVER_URL_HASH = "%s/api/report/hash/%s" % (SERVER_URL, UUID)
-if REPORT_MODE == 'http' and SERVER_TOKEN == "":
- print("Please generate server token using `php think token add --uuid ` on your central server.")
- exit(-1)
-
IPV4 = None
IPV6 = None
COUNTRY = None
@@ -309,6 +305,8 @@ def report_once():
elif REPORT_MODE == 'http':
try:
+ if SERVER_TOKEN == "":
+ raise Exception("Please generate server token using `php think token add --uuid %s` on your central server." % UUID)
req = requests.post(url=SERVER_URL_HASH, data={'ip': IPV4}, headers={'authorization': SERVER_TOKEN})
if req.status_code != 200: raise Exception(req)
req = requests.post(url=SERVER_URL_INFO, json=info, headers={'authorization': SERVER_TOKEN})
From 905a19029735bd33333d8f2536d45124c6a9c6cf Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Wed, 7 Feb 2024 22:39:23 +0800
Subject: [PATCH 48/62] adding timeout
---
.env.example | 8 +++++---
report.py | 10 ++++++----
2 files changed, 11 insertions(+), 7 deletions(-)
diff --git a/.env.example b/.env.example
index 387d6cc..f830bb8 100644
--- a/.env.example
+++ b/.env.example
@@ -1,10 +1,15 @@
REPORT_MODE=redis #http,redis
+SOCKET_TIMEOUT=10
HOST=127.0.0.1
PORT=6379
PASSWORD=""
SSL=False
+REPORT_TIME=60
+TIMEOUT=259200
+RETENTION_TIME=86400
+
SERVER_TOKEN=""
SERVER_URL="http://127.0.0.1"
@@ -12,9 +17,6 @@ IPV4_API="https://v4.ipv6-test.com/api/myip.php"
IPV6_API="https://v6.ipv6-test.com/api/myip.php"
IP_API="http://ip-api.com/json?fields=country,countryCode"
-REPORT_TIME=60
-TIMEOUT=259200
-RETENTION_TIME=86400
DISK_EXCLUDE=/run,/sys,/boot,/dev,/proc,/gdrive,/var/lib
DISK_FS_EXCLUDE=tmpfs,overlay
diff --git a/report.py b/report.py
index e80575b..c5009da 100644
--- a/report.py
+++ b/report.py
@@ -38,6 +38,7 @@
SERVER_URL = os.getenv('SERVER_URL', "")
REPORT_MODE = os.getenv('REPORT_MODE', "redis").lower()
SERVER_TOKEN = os.getenv('SERVER_TOKEN', "")
+SOCKET_TIMEOUT = int(os.getenv('SOCKET_TIMEOUT', "10"))
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
@@ -61,7 +62,8 @@
NET_FORMER = psutil.net_io_counters()
CPU_INFO = cpuinfo.get_cpu_info()
-conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=10)
+if REPORT_MODE == "redis":
+ conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=SOCKET_TIMEOUT)
def get_network():
global NET_FORMER
@@ -307,11 +309,11 @@ def report_once():
try:
if SERVER_TOKEN == "":
raise Exception("Please generate server token using `php think token add --uuid %s` on your central server." % UUID)
- req = requests.post(url=SERVER_URL_HASH, data={'ip': IPV4}, headers={'authorization': SERVER_TOKEN})
+ req = requests.post(url=SERVER_URL_HASH, data={'ip': IPV4}, headers={'authorization': SERVER_TOKEN}, timeout=SOCKET_TIMEOUT)
if req.status_code != 200: raise Exception(req)
- req = requests.post(url=SERVER_URL_INFO, json=info, headers={'authorization': SERVER_TOKEN})
+ req = requests.post(url=SERVER_URL_INFO, json=info, headers={'authorization': SERVER_TOKEN}, timeout=SOCKET_TIMEOUT)
if req.status_code != 200: raise Exception(req)
- req = requests.post(url=SERVER_URL_COLLECTION, json=get_aggregate_stat(), headers={'authorization': SERVER_TOKEN})
+ req = requests.post(url=SERVER_URL_COLLECTION, json=get_aggregate_stat(), headers={'authorization': SERVER_TOKEN}, timeout=SOCKET_TIMEOUT)
if req.status_code != 200: raise Exception(req)
except Exception as e:
raise Exception("[HTTP%d]: %s, %s" % (e.args[0].status_code, e.args[0].text, e.args[0].url))
From 86c86bde6fb9f0148bd9365da9f66e0f82aa7886 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 8 Feb 2024 23:55:06 +0800
Subject: [PATCH 49/62] update log
---
report.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/report.py b/report.py
index c5009da..0ba9763 100644
--- a/report.py
+++ b/report.py
@@ -50,7 +50,7 @@
with open('.uuid', 'w') as fp:
fp.write(UUID)
-print("Your UUID is: %s" % UUID)
+logging.info("Your UUID is: %s" % UUID)
SERVER_URL_INFO = "%s/api/report/info/%s" % (SERVER_URL, UUID)
SERVER_URL_COLLECTION = "%s/api/report/collection/%s" % (SERVER_URL, UUID)
SERVER_URL_HASH = "%s/api/report/hash/%s" % (SERVER_URL, UUID)
From 1f01c9934ce70e2f49492d8c1817c80201e4d44d Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Tue, 9 Apr 2024 10:30:31 +0800
Subject: [PATCH 50/62] Update ip address fetch
---
report.py | 18 ++++++++----------
1 file changed, 8 insertions(+), 10 deletions(-)
diff --git a/report.py b/report.py
index 0ba9763..970a1cb 100644
--- a/report.py
+++ b/report.py
@@ -192,11 +192,10 @@ def get_ipv4():
resp = get_request(IPV4_API)
if resp is not None:
IPV4 = resp.text
- return IPV4
-
- return "None"
- else:
- return IPV4
+ else:
+ IPV4 = "None"
+
+ return IPV4
def get_ipv6():
@@ -206,11 +205,10 @@ def get_ipv6():
resp = get_request(IPV6_API)
if resp is not None:
IPV6 = resp.text
- return IPV6
-
- return "None"
- else:
- return IPV6
+ else:
+ IPV6 = "None"
+
+ return IPV6
def get_country():
From d98473f6df61db840e3df73e945dbf4afaecb335 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Apr 2024 16:01:30 +0800
Subject: [PATCH 51/62] Update
Fix exception raised when the IP API returns no usable response (IPV4/IPV6 set to None).
---
report.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/report.py b/report.py
index 970a1cb..ff5ae7a 100644
--- a/report.py
+++ b/report.py
@@ -190,7 +190,7 @@ def get_ipv4():
global IPV4
if IPV4 is None:
resp = get_request(IPV4_API)
- if resp is not None:
+ if resp is not None and re.match("[0-9]*\.[0-9]*\.[0-9]*",resp.text) is not None:
IPV4 = resp.text
else:
IPV4 = "None"
@@ -203,7 +203,7 @@ def get_ipv6():
global IPV6
if IPV6 is None:
resp = get_request(IPV6_API)
- if resp is not None:
+ if resp is not None and re.match("[a-zA-Z0-9]*:",resp.text) is not None:
IPV6 = resp.text
else:
IPV6 = "None"
From 5075ed058dc6ca05380aafa4bd751331de1d6d32 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Thu, 18 Apr 2024 18:10:01 +0800
Subject: [PATCH 52/62] Update
1. add configurable debug level (DEBUG_LEVEL)
2. add disk I/O counters
3. add fan sensors
4. fix bugs
---
report.py | 46 +++++++++++++++++++++++++++++++++++++++++-----
1 file changed, 41 insertions(+), 5 deletions(-)
diff --git a/report.py b/report.py
index ff5ae7a..603a464 100644
--- a/report.py
+++ b/report.py
@@ -22,6 +22,7 @@
load_dotenv(dotenv_path=os.path.join(extDataDir, '.env'))
# get .env
+DEBUG_LEVEL = os.getenv("DEBUG_LEVEL", '20')
HOST = os.getenv("HOST", "127.0.0.1")
PORT = os.getenv("PORT", "6379")
PASSWORD = os.getenv("PASSWORD", "")
@@ -41,7 +42,7 @@
SOCKET_TIMEOUT = int(os.getenv('SOCKET_TIMEOUT', "10"))
-logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s")
+logging.basicConfig(level=int(DEBUG_LEVEL), format="%(asctime)s - %(message)s")
UUID = str(uuid.uuid4()).replace("-", "")
if os.path.isfile('.uuid'):
with open('.uuid', 'r') as fp:
@@ -60,6 +61,7 @@
COUNTRY = None
TIME = math.floor(time.time())
NET_FORMER = psutil.net_io_counters()
+IO_FORMER = psutil.disk_io_counters()
CPU_INFO = cpuinfo.get_cpu_info()
if REPORT_MODE == "redis":
@@ -81,6 +83,23 @@ def get_network():
return network
+def get_io():
+ global IO_FORMER
+ io_temp = psutil.disk_io_counters()
+
+ io = {'read': {
+ 'count': (io_temp.read_count - IO_FORMER.read_count) if (io_temp.read_count - IO_FORMER.read_count) > 0 else 0,
+ 'bytes': (io_temp.read_bytes - IO_FORMER.read_bytes) if (io_temp.read_bytes - IO_FORMER.read_bytes) > 0 else 0,
+ 'time': (io_temp.read_time - IO_FORMER.read_time) if (io_temp.read_time - IO_FORMER.read_time) > 0 else 0,
+ }, 'write': {
+ 'count': (io_temp.write_count - IO_FORMER.write_count) if (io_temp.write_count - IO_FORMER.write_count) > 0 else 0,
+ 'bytes': (io_temp.write_bytes - IO_FORMER.write_bytes) if (io_temp.write_bytes - IO_FORMER.write_bytes) > 0 else 0,
+ 'time': (io_temp.write_time - IO_FORMER.write_time) if (io_temp.write_time - IO_FORMER.write_time) > 0 else 0,
+ }}
+
+ IO_FORMER = io_temp
+ return io
+
def get_process_num():
return len(psutil.pids())
@@ -118,6 +137,16 @@ def get_battery():
pass
return result
+def get_fan():
+ result = {}
+ try:
+ for sensor_type, sensors in psutil.sensors_fans().items():
+ for sensor in sensors:
+ result[sensor_type+":"+sensor.label] = sensor.current
+ except:
+ pass
+ return result
+
def get_mem_info():
info = {'Mem': {
'total': '%.2f' % (psutil.virtual_memory().total*1.0/1048576),
@@ -226,8 +255,14 @@ def get_country():
def get_connections():
- return len(psutil.net_connections())
+ return "TCP: %d, UDP: %d" % (len(psutil.net_connections("tcp")), len(psutil.net_connections("udp")))
+
+def get_throughput():
+ rx = NET_FORMER.bytes_recv/1073741824
+ tx = NET_FORMER.bytes_sent/1073741824
+ return "{} / {}".format("↓%.2f TB" % rx/1024 if rx > 1024 else "↓%.2f GB" % rx,
+ "↑%.2f TB" % tx/1024 if tx > 1024 else "↑%.2f GB" % tx)
def get_uptime():
t = int(time.time() - psutil.boot_time())
@@ -238,7 +273,6 @@ def get_uptime():
def get_load():
return dict(psutil.cpu_times_percent()._asdict())
-
def get_aggregate_stat_json():
return json.dumps(get_aggregate_stat())
@@ -251,6 +285,8 @@ def get_aggregate_stat():
'Network': get_network(),
'Thermal': get_temp(),
'Battery': get_battery(),
+ 'Fan': get_fan(),
+ 'IO': get_io(),
}
logging.debug(info)
return info
@@ -274,7 +310,7 @@ def report_once():
logging.debug(TIME)
logging.debug(COUNTRY[0])
logging.debug(COUNTRY[1])
- logging.debug("D: %.2f GB / U: %.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824))
+ logging.debug(get_throughput())
info = {
"CPU": "{}x {}".format(get_cpu_core(), get_cpu_name()),
@@ -288,7 +324,7 @@ def report_once():
"Update Time": TIME,
"Country": COUNTRY[0],
"Country Code": "CN" if COUNTRY[1] in ("TW", "HK", "MO") else COUNTRY[1],
- "Throughput": "↓%.2f GB / ↑%.2f GB" % (NET_FORMER.bytes_recv/1073741824, NET_FORMER.bytes_sent/1073741824),
+ "Throughput": get_throughput(),
}
if REPORT_MODE == 'redis':
From a2770495d69e51df599a074ae5350fc3532f1737 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Fri, 19 Apr 2024 00:25:06 +0800
Subject: [PATCH 53/62] Update report.py
Fix TypeError in get_throughput: add parentheses around rx/1024 and tx/1024 so division applies to the number, not the formatted string.
---
report.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/report.py b/report.py
index 603a464..3cb8cf6 100644
--- a/report.py
+++ b/report.py
@@ -261,8 +261,8 @@ def get_throughput():
rx = NET_FORMER.bytes_recv/1073741824
tx = NET_FORMER.bytes_sent/1073741824
- return "{} / {}".format("↓%.2f TB" % rx/1024 if rx > 1024 else "↓%.2f GB" % rx,
- "↑%.2f TB" % tx/1024 if tx > 1024 else "↑%.2f GB" % tx)
+ return "{} / {}".format("↓%.2f TB" % (rx/1024) if rx > 1024 else "↓%.2f GB" % rx,
+ "↑%.2f TB" % (tx/1024) if tx > 1024 else "↑%.2f GB" % tx)
def get_uptime():
t = int(time.time() - psutil.boot_time())
From 82576e2565fb54499f9f6809063f0c6616c69bf8 Mon Sep 17 00:00:00 2001
From: LittleJake <13583702+LittleJake@users.noreply.github.com>
Date: Sun, 5 May 2024 17:33:11 +0800
Subject: [PATCH 54/62] Fix no IO data
---
report.py | 30 ++++++++++++++++++++++++++----
1 file changed, 26 insertions(+), 4 deletions(-)
diff --git a/report.py b/report.py
index 3cb8cf6..86dc62c 100644
--- a/report.py
+++ b/report.py
@@ -60,16 +60,31 @@
IPV6 = None
COUNTRY = None
TIME = math.floor(time.time())
-NET_FORMER = psutil.net_io_counters()
-IO_FORMER = psutil.disk_io_counters()
-CPU_INFO = cpuinfo.get_cpu_info()
if REPORT_MODE == "redis":
conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=SOCKET_TIMEOUT)
+def net_io_counters():
+ try:
+ return psutil.net_io_counters()
+ except Exception as e:
+ logging.error(e)
+ return None
+
+
+def disk_io_counters():
+ try:
+ return psutil.disk_io_counters()
+ except Exception as e:
+ logging.error(e)
+ return None
+
+
def get_network():
global NET_FORMER
- net_temp = psutil.net_io_counters()
+ if NET_FORMER is None: return {}
+
+ net_temp = net_io_counters()
network = {'RX': {
'bytes': (net_temp.bytes_recv - NET_FORMER.bytes_recv) if (net_temp.bytes_recv - NET_FORMER.bytes_recv) > 0 else 0,
@@ -85,6 +100,8 @@ def get_network():
def get_io():
global IO_FORMER
+ if IO_FORMER is None: return {}
+
io_temp = psutil.disk_io_counters()
io = {'read': {
@@ -355,6 +372,11 @@ def report_once():
logging.info("Finish Reporting!")
+
+NET_FORMER = net_io_counters()
+IO_FORMER = disk_io_counters()
+CPU_INFO = cpuinfo.get_cpu_info()
+
while True:
try:
report_once()
From aa142ccaec3e6dc9632c8a2aced5074192bc0bd8 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 13 May 2024 15:52:03 +0800
Subject: [PATCH 55/62] Create Dockerfile
---
Dockerfile | 11 +++++++++++
1 file changed, 11 insertions(+)
create mode 100644 Dockerfile
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..8a0ec64
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,11 @@
+FROM python:3
+MAINTAINER LittleJake https://github.com/LittleJake/
+
+WORKDIR /usr/src/app
+
+COPY requirements.txt ./
+RUN pip install --no-cache-dir -r requirements.txt
+
+COPY . .
+
+CMD [ "python", "./report.py" ]
From d1b59af64e714490cf18217124fa3d2ceb67c203 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 13 May 2024 15:58:10 +0800
Subject: [PATCH 56/62] Update Dockerfile
Change to slim image
---
Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/Dockerfile b/Dockerfile
index 8a0ec64..2a7bffd 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3
+FROM python:3-slim
MAINTAINER LittleJake https://github.com/LittleJake/
WORKDIR /usr/src/app
From 183a9a6a2f59f1c2c98fe73918427371f834489a Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 13 May 2024 16:00:58 +0800
Subject: [PATCH 57/62] Update README.md
Adding docker support
---
README.md | 17 +++++++++++++++++
1 file changed, 17 insertions(+)
diff --git a/README.md b/README.md
index 76a4a95..c375b10 100644
--- a/README.md
+++ b/README.md
@@ -13,6 +13,21 @@ server-monitor-script
### 安装
+
+#### Docker
+
+```bash
+git clone https://github.com/LittleJake/server-monitor-script/
+
+# 编辑.env.example文件保存为.env文件
+cp .env.example .env
+vim .env
+
+docker build -t server-monitor-script:latest ./
+docker run -d server-monitor-script:latest
+
+```
+
#### Linux
```bash
@@ -36,6 +51,8 @@ git clone https://github.com/LittleJake/server-monitor-script/
pip3 install -r requirements.txt
# 编辑.env.example文件保存为.env文件
+copy .env.example .env
+notepad .env
# 运行服务
python3 report.py
From 374de583c93b9e37ab802feca116732254c1e6c0 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 13 May 2024 16:03:23 +0800
Subject: [PATCH 58/62] Update report.py
Fixing `SyntaxWarning: invalid escape sequence '\.'`
---
report.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/report.py b/report.py
index 86dc62c..c9a5140 100644
--- a/report.py
+++ b/report.py
@@ -236,7 +236,7 @@ def get_ipv4():
global IPV4
if IPV4 is None:
resp = get_request(IPV4_API)
- if resp is not None and re.match("[0-9]*\.[0-9]*\.[0-9]*",resp.text) is not None:
+ if resp is not None and re.match("[0-9]*\\.[0-9]*\\.[0-9]*",resp.text) is not None:
IPV4 = resp.text
else:
IPV4 = "None"
@@ -318,7 +318,7 @@ def report_once():
COUNTRY = get_country()
logging.debug("{}x {}".format(get_cpu_core(), get_cpu_name()))
logging.debug(get_sys_version())
- logging.debug(re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()))
+ logging.debug(re.sub("[0-9]*\\.[0-9]*\\.[0-9]*", "*.*.*", get_ipv4()))
logging.debug(re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()))
logging.debug(get_uptime())
logging.debug(get_connections())
@@ -332,7 +332,7 @@ def report_once():
info = {
"CPU": "{}x {}".format(get_cpu_core(), get_cpu_name()),
"System Version": get_sys_version(),
- "IPV4": re.sub("[0-9]*\.[0-9]*\.[0-9]*", "*.*.*", get_ipv4()),
+ "IPV4": re.sub("[0-9]*\\.[0-9]*\\.[0-9]*", "*.*.*", get_ipv4()),
"IPV6": re.sub("[a-zA-Z0-9]*:", "*:", get_ipv6()),
'Uptime': get_uptime(),
'Connection': get_connections(),
From 566e08bbb3802afd398618461d6385d2c45d286b Mon Sep 17 00:00:00 2001
From: snyk-bot
Date: Tue, 18 Jun 2024 20:07:15 +0000
Subject: [PATCH 59/62] fix: requirements.txt to reduce vulnerabilities
The following vulnerabilities are fixed by pinning transitive dependencies:
- https://snyk.io/vuln/SNYK-PYTHON-URLLIB3-7267250
---
requirements.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index 7cd096f..501d085 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,4 +3,5 @@ distro
psutil
requests
redis
-python-dotenv
\ No newline at end of file
+python-dotenv
+urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability
\ No newline at end of file
From 6b33939546ee3fd368519f17ffd0c53b7d5d26f0 Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 24 Jun 2024 15:56:50 +0800
Subject: [PATCH 60/62] Update for docker
---
.env.example | 4 +++-
README.md | 2 +-
report.py | 2 ++
3 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/.env.example b/.env.example
index f830bb8..361c2f8 100644
--- a/.env.example
+++ b/.env.example
@@ -20,4 +20,6 @@ IP_API="http://ip-api.com/json?fields=country,countryCode"
DISK_EXCLUDE=/run,/sys,/boot,/dev,/proc,/gdrive,/var/lib
DISK_FS_EXCLUDE=tmpfs,overlay
-DISK_OPTS_EXCLUDE=ro
\ No newline at end of file
+DISK_OPTS_EXCLUDE=ro
+
+#PROCFS_PATH=/rootfs/proc # mount for docker.
\ No newline at end of file
diff --git a/README.md b/README.md
index c375b10..711e4ac 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ cp .env.example .env
vim .env
docker build -t server-monitor-script:latest ./
-docker run -d server-monitor-script:latest
+docker run -v /:/rootfs:ro -d server-monitor-script:latest monitor
```
diff --git a/report.py b/report.py
index c9a5140..ef8c838 100644
--- a/report.py
+++ b/report.py
@@ -36,6 +36,7 @@
DISK_EXCLUDE = os.getenv('DISK_EXCLUDE','/run,/sys,/boot,/dev,/proc,/var/lib').split(",")
DISK_FS_EXCLUDE = os.getenv('DISK_FS_EXCLUDE', 'tmpfs,overlay').split(",")
DISK_OPTS_EXCLUDE = os.getenv('DISK_OPTS_EXCLUDE', 'ro').split(",")
+PROCFS_PATH = os.getenv('PROCFS_PATH', '/proc')
SERVER_URL = os.getenv('SERVER_URL', "")
REPORT_MODE = os.getenv('REPORT_MODE', "redis").lower()
SERVER_TOKEN = os.getenv('SERVER_TOKEN', "")
@@ -60,6 +61,7 @@
IPV6 = None
COUNTRY = None
TIME = math.floor(time.time())
+psutil.PROCFS_PATH = PROCFS_PATH
if REPORT_MODE == "redis":
conn = redis.Redis(host=HOST, password=PASSWORD, port=PORT, ssl=SSL, retry_on_timeout=SOCKET_TIMEOUT)
From 3cb090a223768f2bef3cf91a56ef4ee8cf0a04de Mon Sep 17 00:00:00 2001
From: Jake Liu <13583702+LittleJake@users.noreply.github.com>
Date: Mon, 24 Jun 2024 16:04:50 +0800
Subject: [PATCH 61/62] Update
---
README.md | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/README.md b/README.md
index 711e4ac..443849f 100644
--- a/README.md
+++ b/README.md
@@ -24,7 +24,7 @@ cp .env.example .env
vim .env
docker build -t server-monitor-script:latest ./
-docker run -v /:/rootfs:ro -d server-monitor-script:latest monitor
+docker run -v /:/rootfs:ro --name monitor -d server-monitor-script:latest
```
From 2bd76872c00e071c61e6a99dd327d9f7aaa215c6 Mon Sep 17 00:00:00 2001
From: snyk-bot
Date: Wed, 10 Jul 2024 06:37:39 +0000
Subject: [PATCH 62/62] fix: requirements.txt to reduce vulnerabilities
The following vulnerabilities are fixed by pinning transitive dependencies:
- https://snyk.io/vuln/SNYK-PYTHON-ZIPP-7430899
---
requirements.txt | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index 501d085..8b8f500 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,4 +4,5 @@ psutil
requests
redis
python-dotenv
-urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability
\ No newline at end of file
+urllib3>=2.2.2 # not directly required, pinned by Snyk to avoid a vulnerability
+zipp>=3.19.1 # not directly required, pinned by Snyk to avoid a vulnerability
\ No newline at end of file