Mirror of https://github.com/MikroWizard/mikroman.git (synced 2025-06-20 19:45:40 +02:00)
Bugs:
- Fixed firmware download from the MikroTik website when multiple .npk packages are available
- Fixed MikroWizard system permission error when the permission is set to None
- Fixed user device group permissions
- Fixed IP scan for a single-IP range / fixed the last IP in the range not being scanned
- Fixed manual snippet execution not working when device groups are selected
- Some minor UI improvements, bug fixes, and other improvements

New:
- Show background tasks and allow stopping them while they run in the background (for example, the IP scanner)
- Add support for manual MikroWizard updates on the dashboard/settings page
- Update to version 1.0.5

Enhancement:
- Show a permission error on pages where the user lacks permission for that page/action
- Show better charts/graphs on the dashboard and in device interface details
- Show more update, version, and license information on the dashboard
This commit is contained in:
parent: a26bd6ae55
commit: 70dc0ddc55

15 changed files with 296 additions and 63 deletions
@@ -20,6 +20,7 @@ py-autoreload = 1
#harakiri=10 - disable locally, otherwise autoreload fails
disable-logging=1
spooler-quiet=1
spooler-ordered=1
spooler-processes=6
spooler-frequency=5
spooler-harakiri=600

@@ -54,6 +55,7 @@ enable-threads = true
vacuum = true
disable-logging=1
spooler-quiet=1
spooler-ordered=1
spooler-processes=6
spooler-frequency=5
spooler-harakiri=600
migrations/023_tasks_update.py (new file, 8 additions)

@@ -0,0 +1,8 @@
# 023_tasks_update.py


def migrate(migrator, database, fake=False, **kwargs):

    migrator.sql("""ALTER TABLE tasks
        ADD COLUMN action text not null default 'None'
    """)
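The new action column is what the rest of this commit uses to ask a running background job to stop: POST /api/tasks/stop sets action to "cancel" on the task row, and each spooler job in bgtasks.py checks the column and exits through cancel_task(). A minimal sketch of that polling pattern, using names from this commit (the job body itself is illustrative only, not the literal bgtasks.py code):

# Illustrative sketch of the cancel flow added in this commit; not the literal bgtasks.py code.
from libs.db import db_tasks

def cancel_task(task_name='', task=0):
    # reset the row so the next run starts clean (same as the helper added in bgtasks.py)
    task.action = 'None'
    task.status = 0
    task.save()
    return True

def some_spooler_job():
    task = db_tasks.backup_job_status()
    if task.action == 'cancel':        # set by POST /api/tasks/stop
        cancel_task('Backup', task)
        return False
    task.status = 1
    task.save()
    # ... do the actual work, re-checking task.action inside long loops ...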
migrations/024_sysconfig_update.py (new file, 5 additions)

@@ -0,0 +1,5 @@
# 024_sysconfig_update.py

def migrate(migrator, database, fake=False, **kwargs):

    migrator.sql("""INSERT INTO public.sysconfig( key, value) VALUES ( 'update_mode', 'auto')""")
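This migration seeds update_mode with the plain value 'auto'. Elsewhere in this commit the setting is read back with json.loads() and accessed as update_mode['mode'], update_mode['update_back'] and update_mode['update_front'], so once manual updates are enabled the stored value is presumably a JSON object along these lines (shape inferred from that usage, not from this migration):

# Assumed JSON shape of the 'update_mode' sysconfig value once manual mode is active,
# inferred from the json.loads(...)['mode' / 'update_back' / 'update_front'] calls in this commit.
import json

update_mode = {"mode": "manual", "update_back": False, "update_front": False}
stored_value = json.dumps(update_mode)  # what db_sysconfig.set_sysconfig('update_mode', ...) writes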
@@ -1 +1 @@
__version__ = "1.0.2"
__version__ = "1.0.5"
@@ -120,11 +120,15 @@ def save_editform():
@login_required(role='admin',perm={'device_group':'read'})
def list_devgroups():
"""return dev groups"""

# build HTML of the method list
devs = []
uid=session.get("userid") or False
try:
devs=list(db_groups.query_groups_api())
perms=list(db_user_group_perm.DevUserGroupPermRel.get_user_group_perms(uid))
group_ids = [perm.group_id for perm in perms]
if str(uid) == "37cc36e0-afec-4545-9219-94655805868b":
group_ids=False
devs=list(db_groups.query_groups_api(group_ids))
except Exception as e:
return buildResponse({'result':'failed','err':str(e)},200)
return buildResponse(devs,200)

@@ -468,8 +472,8 @@ def dev_ifstat():
temp=[]
ids=['yA','yB']
colors=['#17522f','#171951']
colors=['#4caf50','#ff9800']
bgcolor=['rgba(76, 175, 80, 0.2)','rgba(255, 152, 0, 0.2)']
datasets=[]
lables=[]
tz=db_sysconfig.get_sysconfig('timezone')

@@ -482,9 +486,9 @@ def dev_ifstat():
for d in data[val]:
if len(lables) <= len(data[val]):
edatetime=datetime.datetime.fromtimestamp(d[0]/1000)
lables.append(util.utc2local(edatetime,tz=tz).strftime("%m/%d/%Y, %H:%M:%S %Z"))
lables.append(util.utc2local(edatetime,tz=tz).strftime("%Y-%m-%d %H:%M:%S"))
temp.append(round(d[1],1))
datasets.append({'label':val,'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': colors[idx],'pointHoverBackgroundColor': '#fff'})
datasets.append({'label':val,'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': bgcolor[idx],'pointHoverBackgroundColor': '#fff','fill': True})
temp=[]
res["data"]={'labels':lables,'datasets':datasets}
@@ -8,7 +8,7 @@
from flask import request
import datetime

from libs.db import db,db_syslog,db_device,db_AA,db_events,db_sysconfig
from libs.db import db,db_syslog,db_device,db_AA,db_events,db_sysconfig,db_tasks
from libs.webutil import app,buildResponse,login_required
import logging
import operator

@@ -306,6 +306,7 @@ def dashboard_stats():
"""return dashboard data"""
input = request.json
versioncheck = input.get('versioncheck',False)
front_version = input.get('front_version',False)
VERSIONFILE="_version.py"
from _version import __version__
res={}

@@ -342,13 +343,15 @@ def dashboard_stats():
res['Devices']=devs.select().count()
res['Auth']=auth.select().count()
res['Acc']=acc.select().count()
res['Registred']=False
res['license']=False
username=False
internet_connection=True
# check for internet connection before getting data from website
feedurl="https://mikrowizard.com/tag/Blog/feed/?orderby=latest"
test_url="https://google.com"
update_mode=db_sysconfig.get_sysconfig('update_mode')
update_mode=json.loads(update_mode)
res['update_mode']=update_mode['mode']
try:
req = requests.get(test_url, timeout=(0.5,1))
req.raise_for_status()

@@ -371,11 +374,32 @@ def dashboard_stats():
if internet_connection:
response = requests.post(url, json=params)
response=response.json()
# log.error(response)
res['license']=response.get('license',False)
res['update_available']=response.get('available',False)
res['latest_version']=response.get('latest_version',False)
res['update_inprogress']=update_mode['update_back']
else:
res['license']='connection_error'
res['update_available']=False
res['latest_version']=False
except:
pass
try:
if front_version and internet_connection:
params['version']=front_version
params['front']=True
response = requests.post(url, json=params)
response=response.json()
res['front_update_available']=response.get('available',False)
res['front_latest_version']=response.get('latest_version',False)
res['front_update_inprogress']=update_mode['update_front']
except:
pass
except:
pass
res['front_update_available']=True
res['update_available']=True
if username:
res['username']=username
res['blog']=[]

@@ -410,11 +434,12 @@ def dashboard_stats():
def get_version():
"""return version info and serial in crypted format for front updater service"""
VERSIONFILE="_version.py"
log.error("front_update_request")
from _version import __version__
res={}
res['version']=__version__
try:
res['username']=username = db_sysconfig.get_sysconfig('username')
res['username']=db_sysconfig.get_sysconfig('username')
except:
res['username']=False
interfaces = util.get_ethernet_wifi_interfaces()

@@ -424,10 +449,21 @@ def get_version():
install_date=db_sysconfig.get_sysconfig('install_date')
except:
pass
update_mode=db_sysconfig.get_sysconfig('update_mode')
update_mode=json.loads(update_mode)
if install_date:
res['serial']=hwid + "-" + datetime.datetime.strptime(install_date, "%Y-%m-%d %H:%M:%S").strftime("%Y%m%d")
if update_mode['mode']=='manual':
if not update_mode['update_front']:
hwid=hwid+"MANUAL"
else:
update_mode['update_front']=False
db_sysconfig.set_sysconfig('update_mode',json.dumps(update_mode))
res['serial'] = hwid + "-" + datetime.datetime.strptime(install_date, "%Y-%m-%d %H:%M:%S").strftime("%Y%m%d")
if update_mode=='update_now':
db_sysconfig.update_sysconfig('update_mode','manual')
else:
res['serial']=False
log.error(res)
res=util.crypt_data(json.dumps(res))
return buildResponse(res, 200)

@@ -477,8 +513,8 @@ def dashboard_traffic():
temp=[]
ids=['yA','yB']
colors=['#17522f','#171951']
colors=['#4caf50','#ff9800']
bgcolor=['rgba(76, 175, 80, 0.2)','rgba(255, 152, 0, 0.2)']
datasets=[]
lables=[]
data_keys=['tx-{}'.format(interface),'rx-{}'.format(interface)]

@@ -491,7 +527,7 @@ def dashboard_traffic():
if len(lables) <= len(data[val]):
lables.append(datetime.datetime.fromtimestamp(d[0]/1000))
temp.append(round(d[1],1))
datasets.append({'label':val,'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': colors[idx],'pointHoverBackgroundColor': '#fff'})
datasets.append({'label':val,'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': bgcolor[idx],'pointHoverBackgroundColor': '#fff','fill': True})
temp=[]
res["data"]={'labels':lables,'datasets':datasets}

@@ -499,4 +535,17 @@ def dashboard_traffic():
log.error(e)
return buildResponse({'status': 'failed'}, 200, error=e)
pass
return buildResponse(res,200)
return buildResponse(res,200)

@app.route('/api/dashboard/tasks/running', methods = ['POST'])
@login_required(role='admin', perm={'settings':'read'})
def dashboard_tasks_running():
"""return all running tasks"""
input = request.json
tasks=db_tasks.Tasks
try:
res=tasks.select().where(tasks.status=='running').dicts()
except Exception as e:
log.error(e)
return buildResponse({'status': 'failed'}, 200, error=e)
return buildResponse(res,200)
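For context, a hedged example of calling the new running-tasks endpoint from a client; the base URL and session cookie below are placeholders, since only the server side is defined in this commit:

# Hypothetical client call; host, port and cookie name are assumptions.
import requests

resp = requests.post(
    "http://localhost:8080/api/dashboard/tasks/running",
    json={},                                   # handler reads request.json but needs no fields
    cookies={"session": "<admin session id>"}, # admin role with settings:read is required
)
print(resp.json())  # rows from the tasks table whose status marks them as running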
@@ -9,6 +9,7 @@ from flask import request,session

from libs.db import db_user_tasks,db_syslog,db_tasks,db_sysconfig
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
from libs.db.db_groups import devs, get_devs_of_groups
from functools import reduce
import bgtasks
import operator

@@ -117,8 +118,17 @@ def exec_snippet():
return buildResponse({'status': 'failed'},200,error="Wrong name/desc")
#check if cron is valid and correct
taskdata={}
taskdata['memebrs']=members
taskdata['owner']=members
if selection_type=="devices":
taskdata['memebrs']=members
elif selection_type=="groups":
devs=get_devs_of_groups(members)
devids=[dev.id for dev in devs]
taskdata['memebrs']=devids
uid = session.get("userid") or False
if not uid:
return buildResponse({'result':'failed','err':"No User"}, 200)
taskdata['owner']=str(uid)
default_ip=db_sysconfig.get_sysconfig('default_ip')
snipet=db_user_tasks.get_snippet(snippetid)
if snipet:
taskdata['snippet']={'id':snipet.id,'code':snipet.content,'description':snipet.description,'name':snipet.name}

@@ -144,12 +154,8 @@ def exec_snippet():
}
task=utasks.create(**data)
status=db_tasks.exec_snipet_status().status
uid = session.get("userid") or False
default_ip=db_sysconfig.get_sysconfig('default_ip')
if not uid:
return buildResponse({'result':'failed','err':"No User"}, 200)
if not status:
bgtasks.exec_snipet(task=task,default_ip=default_ip,devices=members,uid=uid)
bgtasks.exec_snipet(task=task,default_ip=default_ip,devices=taskdata['memebrs'],uid=uid)
res={'status': True}
else:
res={'status': status}

@@ -158,8 +164,7 @@ def exec_snippet():
return buildResponse([{'status': 'success'}],200)
except Exception as e:
log.error(e)
return buildResponse({'status': 'failed','massage':str(e)},200)

return buildResponse({'status': 'failed','massage':str(e)},200)

@app.route('/api/snippet/executed', methods = ['POST'])
@login_required(role='admin',perm={'task':'write'})
@@ -6,13 +6,16 @@
# Author: sepehr.ha@gmail.com

from flask import request

from libs.db import db_sysconfig,db_syslog
import uwsgi
import signal
import os
from libs.db import db_sysconfig,db_syslog, db_tasks
from libs import util
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
import time
import logging
import json
from pathlib import Path

log = logging.getLogger("api.sysconfig")

@@ -44,8 +47,91 @@ def sysconfig_save_all():
continue
elif k=="default_password" or k=="default_user":
v['value']=util.crypt_data(v['value'])
elif k=="update_mode":
v['value']=json.dumps(v['value'])
data.append({"key":k,"value":v['value'],"modified":"NOW"})
db_syslog.add_syslog_event(get_myself(), "Sys Config","Update", get_ip(),get_agent(),json.dumps(input))
db_sysconfig.save_all(data)

return buildResponse({"status":"success"})

@app.route('/api/tasks/list', methods = ['POST'])
@login_required(role='admin',perm={'settings':'read'})
def tasks_list():
"""get all tasks"""
input = request.json
res=[]
res=db_tasks.get_all().dicts()
for t in res:
t['name']=t['name'].replace("-"," ").replace("_"," ")
return buildResponse({"tasks":res})

@app.route('/api/tasks/stop', methods = ['POST'])
@login_required(role='admin',perm={'settings':'write'})
def stop_task():
"""get all tasks"""
input = request.json
task_signal = int(input['signal'])
task=db_tasks.get_task_by_signal(task_signal)
res=[]
#remove spooler file to stop task
#list files under directory
if not task:
return buildResponse({'result':'failed','err':"No task"}, 200)
spooldir=uwsgi.opt['spooler'].decode()+'/'+str(task_signal)
#list all files and remove them in spooldir
files = []
try:
if os.path.exists(spooldir):
for file in os.listdir(spooldir):
file_path = os.path.join(spooldir, file)
if os.path.isfile(file_path):
os.remove(file_path)
files.append(file)
except Exception as e:
log.error(f"Error removing spool files: {str(e)}")
return buildResponse({'result':'failed','err':str(e)}, 200)
pid=uwsgi.spooler_pid()
#kill pid to stop task
if task_signal not in [130,140]:
try:
os.kill(pid, signal.SIGTERM) # Attempt graceful shutdown
except ProcessLookupError:
return buildResponse({'result':'failed','err':'Spooler not running'}, 200)
except PermissionError:
return buildResponse({'result':'failed','err':'Permission denied to reload spooler process'}, 200)
except Exception as e:
return buildResponse({'result':'failed','err':str(e)}, 200)
else:
task.action="cancel"
task.status=False
task.save()
return buildResponse({"status":"success"})
task.status=False
task.action="None"
task.save()
return buildResponse({"status":"success"})


@app.route('/api/sysconfig/apply_update', methods = ['POST'])
@login_required(role='admin',perm={'settings':'write'})
def apply_update():
"""apply update"""
input = request.json
action = input['action']
update_mode=db_sysconfig.get_sysconfig('update_mode')
update_mode=json.loads(update_mode)

if update_mode['mode']=='manual':
if action=='update_mikroman':
update_mode['update_back']=True
db_sysconfig.set_sysconfig('update_mode',json.dumps(update_mode))
Path('/app/reload').touch()
return buildResponse({"status":"success"})
if action=='update_mikrofront':
update_mode['update_front']=True
db_sysconfig.set_sysconfig('update_mode',json.dumps(update_mode))
return buildResponse({"status":"success"})
return buildResponse({"status":"success"})
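A hedged example of driving the new stop endpoint; signal 120 is the backup job per db_tasks.backup_job_status(), while the URL and cookie are placeholders:

# Hypothetical client call; host, port and cookie name are assumptions.
import requests

resp = requests.post(
    "http://localhost:8080/api/tasks/stop",
    json={"signal": 120},                      # 120 = backup job (see db_tasks.backup_job_status)
    cookies={"session": "<admin session id>"}, # requires settings:write
)
print(resp.json())  # {"status": "success"} once the spool files are removed and the task row is reset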
@@ -38,9 +38,19 @@ def serialize_datetime(obj):
if isinstance(obj, datetime.datetime):
return obj.isoformat()

def cancel_task(task_name='',task=0):
log.info(f"Canceling task {task_name}")
task.action='None'
task.status=0
task.save()
return True

@spool(pass_arguments=True)
def check_devices_for_update(*args, **kwargs):
task=db_tasks.update_check_status()
if task.action=='cancel':
cancel_task('Firmware Check',task)
return False
if not task.status:
task.status=1
task.save()

@@ -70,7 +80,7 @@ def check_devices_for_update(*args, **kwargs):
if not qres.get("reason",False):
res.append(qres)
else:
db_events.connection_event(dev.id,qres["reason"])
db_events.connection_event(qres['id'],'Firmware updater',qres.get("detail","connection"),"Critical",0,qres.get("reason","problem in Frimware updater"))
db_device.update_devices_firmware_status(res)
except Exception as e:
log.error(e)

@@ -85,6 +95,9 @@ def check_devices_for_update(*args, **kwargs):
@spool(pass_arguments=True)
def update_device(*args, **kwargs):
task=db_tasks.update_job_status()
if task.action=='cancel':
cancel_task('Firmware Update',task)
return False
if not task.status:
task.status=1
task.save()

@@ -127,6 +140,9 @@ def update_device(*args, **kwargs):
@spool(pass_arguments=True)
def download_firmware(*args, **kwargs):
task=db_tasks.downloader_job_status()
if task.action=='cancel':
cancel_task('Firmware Download',task)
return False
if not task.status:
task.status=1
task.save()

@@ -144,21 +160,29 @@ def download_firmware(*args, **kwargs):
t.join()
res=[]
for _ in range(num_threads):
action=db_tasks.downloader_job_status().action
if action=='cancel':
cancel_task('Firmware Download',task)
return False
qres=q.get()
print(qres)
# db_device.update_devices_firmware_status(res)
except Exception as e:
log.error(e)
task.status=0
task.action='None'
task.save()
return False
task.status=0
task.action='None'
task.save()
return False

@spool(pass_arguments=True)
def backup_devices(*args, **kwargs):
task=db_tasks.backup_job_status()
if task.action=='cancel':
cancel_task('Backup',task)
return False
if not task.status:
task.status=1
task.save()

@@ -258,6 +282,9 @@ def scan_with_mac(timer=2):
def scan_with_ip(*args, **kwargs):
try:
task=db_tasks.scanner_job_status()
if task.action=='cancel':
cancel_task('IP Scan',task)
return False
task.status=1
task.save()
start_ip=kwargs.get('start',False)

@@ -281,14 +308,15 @@ def scan_with_ip(*args, **kwargs):
end_ip = ipaddress.IPv4Address(end_ip)
scan_port=kwargs.get('port',False)
default_user,default_pass=util.get_default_user_pass()
log.error("stating scan ")
log.error("starting scan ")
mikrotiks=[]
scan_results=[]
dev_number=0

for ip_int in range(int(start_ip), int(end_ip)):
for ip_int in range(int(start_ip), int(end_ip)+1):
task=db_tasks.scanner_job_status()
if task.action=='cancel':
cancel_task('IP Scan',task)
return False
ip=str(ipaddress.IPv4Address(ip_int))
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(0.2)

@@ -438,6 +466,9 @@ def scan_with_ip(*args, **kwargs):
@spool(pass_arguments=True)
def exec_snipet(*args, **kwargs):
task=db_tasks.exec_snipet_status()
if task.action=='cancel':
cancel_task('Snipet Exec',task)
return False
if not task.status:
task.status=1
task.save()

@@ -458,7 +489,6 @@ def exec_snipet(*args, **kwargs):
num_threads = len(devs)
q = queue.Queue()
threads = []
log.error(devs)
for dev in devs:
peer_ip=dev.peer_ip if dev.peer_ip else default_ip
if not peer_ip and '[mikrowizard]' in taskdata['snippet']['code']:

@@ -495,6 +525,11 @@ def exec_snipet(*args, **kwargs):
def exec_vault(*args, **kwargs):
Tasks=db_tasks.Tasks
task=Tasks.select().where(Tasks.signal == 170).get()
if(task.action=='cancel'):
cancel_task('Vault Exec',task)
return False
if not ISPRO:
return False
if not task.status:
try:
task.status=1
@@ -105,9 +105,10 @@ def devs2(groupid):
.join(DevGroupRel, on=DevGroupRel.device_id)
.where(DevGroupRel.group_id == groupid)
.order_by(Devices.name))

def get_devs_of_groups(group_ids):
try:
group_ids=[group.id for group in group_ids]
group_ids=[group if isinstance(group, int) else group.id for group in group_ids]
if 1 in group_ids:
return list(Devices
.select()

@@ -122,9 +123,12 @@ def get_devs_of_groups(group_ids):
return []

#get all groups including devices in each group
def query_groups_api():
def query_groups_api(group_ids=[]):
t3=DevGroups.alias()
q=DevGroups.select(DevGroups.id,DevGroups.name,DevGroups.created,fn.array_agg(DevGroupRel.device_id)).join(DevGroupRel,JOIN.LEFT_OUTER, on=(DevGroupRel.group_id == DevGroups.id)).order_by(DevGroups.id).group_by(DevGroups.id)
if not isinstance(group_ids, list):
q=DevGroups.select(DevGroups.id,DevGroups.name,DevGroups.created,fn.array_agg(DevGroupRel.device_id)).join(DevGroupRel,JOIN.LEFT_OUTER, on=(DevGroupRel.group_id == DevGroups.id)).order_by(DevGroups.id).group_by(DevGroups.id)
else:
q=DevGroups.select(DevGroups.id,DevGroups.name,DevGroups.created,fn.array_agg(DevGroupRel.device_id)).join(DevGroupRel,JOIN.LEFT_OUTER, on=(DevGroupRel.group_id == DevGroups.id)).where(DevGroups.id << group_ids).order_by(DevGroups.id).group_by(DevGroups.id)
return list(q.dicts())

def get_groups_by_id(ids):

@@ -143,7 +147,6 @@ def delete_from_group(devids):

def delete_device(devid):
try:

delete_from_group([devid])
dev = get_object_or_none(Devices, id=devid)
dev.delete_instance(recursive=True)
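query_groups_api() now takes an optional group_ids argument, which is how the fixed list_devgroups permission check limits results to the caller's device groups; passing anything that is not a list (list_devgroups passes False for the built-in admin user) returns every group. A short usage sketch based on those call sites, with arbitrary example ids:

# Usage sketch based on the call sites in this commit; the ids are arbitrary examples.
from libs.db import db_groups

all_groups     = db_groups.query_groups_api(False)      # non-list sentinel: no filtering
allowed_groups = db_groups.query_groups_api([2, 5, 9])  # only these group ids are returned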
@@ -17,7 +17,8 @@ class Tasks(BaseModel):
starttime = DateTimeField()
endtime = DateTimeField()
status = BooleanField()

action = TextField()
name = TextField()
class Meta:
# `indexes` is a tuple of 2-tuples, where the 2-tuples are
# a tuple of column names to index and a boolean indicating

@@ -32,7 +33,6 @@ def update_check_status():
def update_job_status():
return (Tasks.select().where(Tasks.signal == 110).get())


#Get groups of device
def backup_job_status():
return (Tasks.select().where(Tasks.signal == 120).get())

@@ -51,6 +51,14 @@ def firmware_service_status():
def exec_snipet_status():
return (Tasks.select().where(Tasks.signal == 160).get())

def get_running_tasks():
return (Tasks.select().where(Tasks.status == True))

def get_task_by_signal(signal):
return (Tasks.select().where(Tasks.signal == signal).get())

def get_all():
return (Tasks.select())

class TaskResults(BaseModel):
task_type = TextField()
@@ -11,7 +11,7 @@ import time
import uuid
import socket
import config
from libs.db import db_sysconfig,db_firmware,db_backups,db_events
from libs.db import db_sysconfig,db_firmware,db_tasks,db_events
from cryptography.fernet import Fernet
from libs.check_routeros.routeros_check.resource import RouterOSCheckResource
from libs.check_routeros.routeros_check.helper import RouterOSVersion

@@ -37,26 +37,31 @@ except ImportError:
import zipfile

def extract_from_link(link,all_package=False):
if all_package:
regex = r"https:\/\/download\.mikrotik\.com\/routeros\/(\d{1,3}.*)?\/all_packages-(.*)-(.*).zip"
matches = re.match(regex, link)
if not matches:
return False
res=matches.groups()
version=res[0]
arch = res[1]
return {"link":link, "arch":arch, "version":version, "all_package":True}
else:
regex = r"https:\/\/download\.mikrotik\.com\/routeros\/(\d{1,3}.*)?\/routeros-(.*).npk"
matches = re.match(regex,link)
res=matches.groups()
version=res[0]
arch = res[1].replace(version, "")
if arch == "":
arch = "x86"
try:
if all_package:
regex = r"https:\/\/download\.mikrotik\.com\/routeros\/(\d{1,3}.*)?\/all_packages-(.*)-(.*).zip"
matches = re.match(regex, link)
if not matches:
return False
res=matches.groups()
version=res[0]
arch = res[1]
return {"link":link, "arch":arch, "version":version, "all_package":True}
else:
arch=arch.replace("-","")
return {"link":link,"arch":arch, "version":version}
regex = r"https:\/\/download\.mikrotik\.com\/routeros\/(\d{1,3}.*)?\/routeros-(.*).npk"
matches = re.match(regex,link)
res=matches.groups()
version=res[0]
arch = res[1].replace(version, "")
if arch == "":
arch = "x86"
else:
arch=arch.replace("-","")
return {"link":link,"arch":arch, "version":version}
except Exception as e:
log.info("unable to extract from link : {}".format(link))
log.info(e)
return False


def get_mikrotik_latest_firmware_link():

@@ -68,6 +73,8 @@ def get_mikrotik_latest_firmware_link():
link=str(link.get('href'))
if ".npk" in link:
frimware=extract_from_link(link)
if not frimware:
continue
firms.setdefault(frimware["version"],{})
firms[frimware["version"]][frimware["arch"]]={"link":frimware["link"],"mark":"latest"}
# firms.append(link)

@@ -87,7 +94,10 @@ def get_mikrotik_download_links(version,all_package=False):
lnk=str(link[0].get('href'))
sha=str(link[1].get('data-checksum-sha256'))
if ".npk" in lnk:
log.error(lnk)
frimware=extract_from_link(lnk)
if not frimware:
continue
firms.setdefault(frimware["version"], {})
firms[frimware["version"]][frimware["arch"]]={"link":frimware["link"],"sha":sha}
# firms.append(link)

@@ -206,6 +216,12 @@ def download_firmware_to_repository(version,q,arch="all",all_package=False):
links=links[version]
firm=db_firmware.Firmware()
for lnk in links:
task=db_tasks.downloader_job_status()
if task.action=="cancel":
log.info("Firmware Download Task Canceled")
if q:
q.put({"status":False})
return False
if all_package and arch+"-allpackage" == lnk:
arch_togo=lnk
link=links[lnk]["link"]
@@ -280,7 +280,7 @@ def grab_device_data(dev, q):
if d['name'] in excluded_keys:
continue
health_vals[d['name']]=d['value']
elif result['board-name']=='x86':
elif result['board-name']=='x86' or 'x86' in result['architecture-name']:
health_vals={}
else:
health_vals: Dict[str, str] = health[0]

@@ -579,7 +579,7 @@ def check_update(options,router=False):
log.error(e)
pass
upgrade=False
if result['board-name']!='x86' and result['current-firmware']!= result['upgrade-firmware'] and result['board-name']!='x86':
if 'x86' not in result['board-name'] and result['current-firmware']!= result['upgrade-firmware'] and result['board-name']!='x86':
upgrade=True
if _latest_version and _installed_version < _latest_version:
return True, _installed_version,arch,upgrade
@@ -261,10 +261,12 @@ def _is_role_atleast(myrole, rolebase, perm):
return "userid" in session
userperms=session.get("perms") or {}
perms = { "None":1,"read":2, "write":3, "full":4}
res=True;
res=True
if len(perm)>0:
for key, value in perm.items():
if key in userperms:
if userperms[key]=='none':
res=False
res=res and perms[userperms[key]]>=perms[value]
else:
return False
@@ -16,6 +16,7 @@ import os
import hashlib
import zipfile
import subprocess
import json
log = logging.getLogger("Updater_mule")
import pip

@@ -67,6 +68,7 @@ def check_sha256(filename, expect):
return False

def extract_zip_reload(filename,dst):
return True
"""Extract the contents of the zip file "filename" to the directory
"dst". Then reload the updated modules."""
with zipfile.ZipFile(filename, 'r') as zip_ref:

@@ -114,6 +116,14 @@ def main():
print("Running hourly Update checker ...")
interfaces = util.get_ethernet_wifi_interfaces()
hwid = util.generate_serial_number(interfaces)
update_mode=db_sysconfig.get_sysconfig('update_mode')
update_mode=json.loads(update_mode)
if update_mode['mode']=='manual':
if not update_mode['update_back']:
hwid=hwid+"MANUAL"
else:
update_mode['update_back']=False
db_sysconfig.set_sysconfig('update_mode',json.dumps(update_mode))
username=False
try:
username = db_sysconfig.get_sysconfig('username')