MikroWizard Initial commit | MikroMan Welcome to the world :)

This commit is contained in:
sepehr 2024-07-20 15:48:46 +03:30
commit 8c49b9a55d
96 changed files with 12274 additions and 0 deletions

166
.dockerignore Normal file
View file

@@ -0,0 +1,166 @@
**/firms
# Byte-compiled / optimized / DLL files
**/__pycache__
**/.vscode
*.so
**/backups
# Distribution / packaging
.Python
**/build/
**/develop-eggs
**/dist
**/downloads
**/eggs
.eggs/
**/lib
**/lib64
**/parts
**/sdist
**/var
**/wheels
**/test
**/share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
py.code-workspace
app.log
real-server-config.json
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
**/test
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
tests/
reload/
# pytype static type analyzer
.pytype/
firmwares/
# Cython debug symbols
cython_debug/
.git/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
*pro_api*
*pro.py

169
.gitignore vendored Normal file
View file

@@ -0,0 +1,169 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
.vscode/
# C extensions
*.so
backups/
firms/
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
test/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
py.code-workspace
app.log
real-server-config.json
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
*pro_api*
*pro.py

79
Dockerfile Normal file
View file

@@ -0,0 +1,79 @@
FROM python:3.11-slim-bullseye

WORKDIR /app

# uwsgi must be compiled - install necessary build tools, compile uwsgi
# and then remove the build tools to minimize image size
# (buildDeps are removed, deps are kept in the final image)
COPY reqs.txt /app/reqs.txt
RUN set -ex \
    && buildDeps=' \
        build-essential \
        gcc \
    ' \
    # runtime packages kept in the final image
    # (cron/ping/net-tools/nano were previously separate RUN layers and a
    # trailing "apt update" that re-created /var/lib/apt/lists in the image;
    # folding them here keeps one cached layer and one cleanup)
    && deps=' \
        htop \
        cron \
        iputils-ping \
        net-tools \
        nano \
    ' \
    && apt-get update \
    && apt-get install -y $buildDeps $deps --no-install-recommends \
    && rm -rf /var/lib/apt/lists/* \
    && pip install uWSGI==2.0.22 \
    && pip install -r /app/reqs.txt \
    && apt-get purge -y --auto-remove $buildDeps \
    # strip test dirs and compiled bytecode from installed packages
    && find /usr/local -depth \
        \( \
            \( -type d -a -name test -o -name tests \) \
            -o \
            \( -type f -a -name '*.pyc' -o -name '*.pyo' \) \
        \) -exec rm -rf '{}' +

# log file the cron jobs append to
RUN touch /var/log/cron.log

# copy source files
COPY conf /app/conf
COPY py /app/py
#COPY real-server-config.json /app/real-server-config.json
COPY migrations /app/migrations
#COPY migrations_sqlite /app/migrations_sqlite
COPY scripts /app/scripts
COPY templates /app/templates
#COPY test /app/test
COPY conf/loginscript.sh /etc/profile
COPY migratedb.py /app/

# background spooler dir used by uwsgi
RUN mkdir /tmp/pysrv_spooler

# we don't need this file with Docker but uwsgi looks for it
RUN echo `date +%s` >/app/VERSION

EXPOSE 80

# our server config file
# - you should write your own config file and put OUTSIDE the repository
#   since the config file contains secrets
# - here I use the sample template from repo
# - it is also possible to override the config with env variables, either here
#   or in Amazon ECS or Kubernetes configuration
#COPY /app/real-server-config.json /app/real-server-config.json
# ENV PYSRV_DATABASE_HOST host.docker.internal
# ENV PYSRV_REDIS_HOST host.docker.internal
# ENV PYSRV_DATABASE_PASSWORD x

# build either a production or dev image
ARG BUILDMODE=production
ENV ENVBUILDMODE=$BUILDMODE
RUN echo "BUILDMODE $ENVBUILDMODE"

# shell-form CMD so $ENVBUILDMODE expands; start cron alongside uwsgi
#CMD /etc/init.d/cron start ; uwsgi --ini /app/conf/uwsgi.ini:uwsgi-$ENVBUILDMODE --touch-reload=/app/reload
CMD cron ; uwsgi --ini /app/conf/uwsgi.ini:uwsgi-$ENVBUILDMODE --touch-reload=/app/reload

661
LICENSE Normal file
View file

@@ -0,0 +1,661 @@
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

1
README.md Normal file
View file

@ -0,0 +1 @@

5
build.sh Executable file
View file

@ -0,0 +1,5 @@
#!/bin/sh
# Build the MikroMan Docker image in PRODUCTION mode.
# --rm removes intermediate containers after a successful build.
sudo docker build --rm --build-arg BUILDMODE=production -t mikroman .

BIN
conf/favicon.ico Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.4 KiB

9
conf/loginscript.sh Normal file
View file

@ -0,0 +1,9 @@
# this is /etc/profile
# - a login script when running the interactive shell inside the container
# Make the app importable and point it at the runtime config.
export PYTHONPATH=/app/py
export PYSRV_CONFIG_PATH=/app/conf/real-server-config.json
# NOTE(review): development mode inside the container shell — confirm this
# should not be "production" for deployed containers.
export FLASK_ENV=development
# convenience aliases for interactive use
alias l='ls'
alias ll='ls -l'

27
conf/pydaemon.service Normal file
View file

@ -0,0 +1,27 @@
# systemd service configuration - uwsgi daemon
#
# https://www.digitalocean.com/community/tutorials/understanding-systemd-units-and-unit-files
# https://www.digitalocean.com/community/tutorials/how-to-serve-flask-applications-with-uwsgi-and-nginx-on-ubuntu-16-04
# make start on boot: systemctl enable mydaemon
[Unit]
Description=pysrv uwsgi daemon
After=network.target
[Service]
User=root
#User=myapp # user privileges are set by uwsgi
#Group=mygroup
# Pre-create the uwsgi spooler directory on every start.
# Fixed: the directory must match "spooler = /tmp/my_spooler" in conf/uwsgi.ini
# (was /tmp/pysrv_spooler), and the trailing ";" was dropped — systemd passes
# it as a literal argument to mkdir, creating a directory named ";".
ExecStartPre=/bin/mkdir -p /tmp/my_spooler
ExecStart=/usr/local/bin/uwsgi --ini /app/conf/uwsgi.ini:uwsgi-production
RuntimeDirectory=mydaemon
Restart=always
RestartSec=3
KillSignal=SIGQUIT
[Install]
WantedBy=multi-user.target

3
conf/robots.txt Normal file
View file

@ -0,0 +1,3 @@
User-agent: *
Disallow: /

19
conf/server-config.json Normal file
View file

@ -0,0 +1,19 @@
{
"name": "python server config template - rename me",
"PYSRV_IS_PRODUCTION": "",
"PYSRV_DATABASE_HOST": "/app/data/mydb.sqlite",
"PYSRV_DATABASE_HOST_POSTGRESQL": "host.docker.internal",
"PYSRV_DATABASE_HOST_SQLITE": "/app/data/mydb.sqlite",
"PYSRV_DATABASE_PORT": "54320",
"PYSRV_DATABASE_NAME": "tmdb",
"PYSRV_DATABASE_USER": "tm",
"PYSRV_DATABASE_PASSWORD": "MY_PASSWORD",
"PYSRV_COOKIE_HTTPS_ONLY": false,
"PYSRV_REDIS_HOST": "host.docker.internal:6379",
"PYSRV_DOMAIN_NAME": "",
"PYSRV_CORS_ALLOW_ORIGIN": "*"
}

73
conf/uwsgi.ini Normal file
View file

@ -0,0 +1,73 @@
# uwsgi daemon config
# https://uwsgi-docs.readthedocs.io/en/latest/Options.html
# old: local dev - plain python, no docker
[uwsgi-docker-dev]
env = FLASK_ENV=development
# NOTE(review): /conf/server-conf.json does not match the shipped
# conf/server-config.json (name and directory differ) — confirm the real path.
env = PYSRV_CONFIG_PATH=/conf/server-conf.json
http = host.docker.internal:8181
master = 1
wsgi-file = py/main.py
callable = app
# processes = 1, otherwise autoreload fails
processes = 4
chdir = /app/
pythonpath = /app/py/
stats = 127.0.0.1:9100
#virtualenv = $(SERVER_VIRTUALENV)
py-autoreload = 1
#harakiri=10 - disable locally, otherwise autoreload fails
disable-logging=1
spooler-quiet=1
spooler-processes=6
spooler-frequency=5
spooler-harakiri=600
# the spool directory must exist before uwsgi starts
spooler = /tmp/my_spooler
socket-timeout = 60
# few static files - serve the frontend from elsewhere
static-map = /robots.txt=conf/robots.txt
static-map = /favicon.ico=conf/favicon.ico
# background worker processes: one mule per long-running job
mule = py/mules/radius.py
mule = py/mules/data_grabber.py
mule = py/mules/syslog.py
mule = py/mules/updater.py
mule = py/mules/firmware.py
# production profile — selected by pydaemon.service via
# "--ini /app/conf/uwsgi.ini:uwsgi-production"
[uwsgi-production]
env = FLASK_ENV=production
# NOTE(review): /conf/server-conf.json does not match the shipped
# conf/server-config.json — confirm the real path.
env = PYSRV_CONFIG_PATH=/conf/server-conf.json
# NOTE(review): binding to host.docker.internal in production looks copied
# from the dev profile — confirm the intended listen address.
http = host.docker.internal:8181
master = 1
wsgi-file = py/main.py
callable = app
# processes = 1, otherwise autoreload fails
processes = 4
chdir = /app/
pythonpath = /app/py/
#stats = 127.0.0.1:9100
#virtualenv = $(SERVER_VIRTUALENV)
#py-autoreload = 1
#harakiri=10 - disable locally, otherwise autoreload fails
enable-threads = true
vacuum = true
disable-logging=1
spooler-quiet=1
spooler-processes=6
spooler-frequency=5
spooler-harakiri=600
# the spool directory must exist before uwsgi starts
spooler = /tmp/my_spooler
logger = stdio
# daily log file; %% escapes strftime codes for uwsgi's @(exec://...) expansion
logto = /app/uwsgi-@(exec://date +%%Y-%%m-%%d).log
log-maxsize = 104857600
socket-timeout = 60
# few static files - serve the frontend from elsewhere
static-map = /robots.txt=conf/robots.txt
static-map = /favicon.ico=conf/favicon.ico
# background worker processes: one mule per long-running job
mule = py/mules/radius.py
mule = py/mules/data_grabber.py
mule = py/mules/syslog.py
mule = py/mules/updater.py
mule = py/mules/firmware.py

134
fabfile.py vendored Normal file
View file

@ -0,0 +1,134 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# fabfile.py: automated tasks
# - deploy sources from local machine to test/production server
# - migrate local/server database
#
# Author: Tomi.Mickelsson@iki.fi
import sys
import os
import time
import io

from fabric.api import env, run, task, sudo, local, put
from fabric.contrib.console import confirm
from fabric.contrib.project import rsync_project
# from fabric.contrib.console import confirm
# from fabric.contrib.project import rsync_project
# from fabric.operations import prompt
# write your own server info here:
TEST_SERVER = "testserver.mydomain.com"
PRODUCTION_SERVER = "www.mydomain.com"
SSH_USER = ""
SSH_PRIVATE_KEY = "~/.ssh/xxx_rsa"
# --------------------------------------------------------------------------
# fabric reads these
env.hosts = [TEST_SERVER]
env.use_ssh_config = True
env.user = SSH_USER
env.remotedir = "/app/"
env.port = 22
env.key_filename = SSH_PRIVATE_KEY
# CREATE ROLE MIKROMAN superuser;
# CREATE USER MIKROMAN;
# create user MIKROMAN with superuser password 'MIKROMAN_MY_PASSWORD';
# alter user MIKROMAN with encrypted password 'MIKROMAN_MY_PASSWORD';
# ALTER ROLE "MIKROMAN" WITH LOGIN;
# --------------------------------------------------------------------------
# DATABASE TASKS
@task
def postgres_migrate_local():
    """Run the peewee migration script against the local database."""
    local("python scripts/dbmigrate.py")
@task
def postgres_migrate_remote():
    """Server database migrate.

    NOTE(review): this only PRINTS the migration command — it is never
    executed on the remote host (no run()/sudo()).  deploy() calls this as if
    it migrated the database; confirm whether it should run(cmd) instead.
    """
    dir = env.remotedir
    cmd = "cd {}; PYTHONPATH={}py PYSRV_CONFIG_PATH={} python3 scripts/dbmigrate.py".format(dir, dir, dir+"real-server-config.json")
    print(cmd)
@task
def postgres_run_server():
    """Print the command for starting a local PostgreSQL server (cheat sheet)."""
    print("postgres -D /usr/local/var/postgres")
@task
def postgres_list_tables():
    """Print a psql command that lists every table in the public schema."""
    query = "SELECT * FROM pg_catalog.pg_tables WHERE schemaname = 'public'"
    print('psql -d tmdb -c "{}"'.format(query))
@task
def postgres_list_users():
    """Print a psql command that dumps the users table."""
    query = "SELECT * FROM users"
    print('psql -d tmdb -c "{}"'.format(query))
@task
def postgres_gen_models():
    """Generate peewee models from database: generated-models.py

    Only prints the pwiz command; run it manually.
    """
    cmd = "pwiz.py -e postgresql -u tm -P tmdb >generated-models.py"
    print(cmd)
# --------------------------------------------------------------------------
# DEPLOY TASKS
@task
def production():
    """Set target host to production server.

    NOTE(review): `confirm` comes from fabric.contrib.console, whose import is
    commented out at the top of this file — as shipped this raises NameError.
    """
    # interactive guard against accidental production deploys
    if confirm("DEPLOY PRODUCTION, YOU SURE ??????", default=False):
        env.hosts = [PRODUCTION_SERVER]
        print("Deploying soon... ", env.hosts[0].upper())
        # wait a little so you can still stop...
        time.sleep(5)
    else:
        print("Exiting")
        sys.exit(1)
@task
def deploy():
    """Deploy current local sources to server + db migration.

    NOTE(review): postgres_migrate_remote() only prints the migration
    command, so no migration actually runs here — confirm.
    """
    rsync_files()
    postgres_migrate_remote()
    # touch VERSION, uwsgi will then restart automatically
    data = io.StringIO("%d" % time.time())
    put(data, "/app/VERSION", use_sudo=False)
def rsync_files():
    """rsync source files to remote server.

    NOTE(review): `rsync_project` comes from fabric.contrib.project, whose
    import is commented out at the top of this file — as shipped this raises
    NameError.
    """
    # skip build artifacts, VCS metadata and editor junk
    exclude_list = ['*.pyc', '.git', '.DS_Store', 'node_modules', '__pycache__',
        'doc', 'trash']
    # --checksum: only transfer changed content; -O/--no-perms: don't touch
    # directory times/permissions on the server
    rsync_project(env.remotedir, local_dir=".", delete=False,
        default_opts='-hrvz', exclude=exclude_list,
        extra_opts=' -O --no-perms --checksum')
@task
def deploy_mydaemon():
    """Update uwsgi master config conf/pydaemon.service, then restart."""
    # warn_only: the first deploy has no service to stop yet
    sudo("systemctl stop pydaemon", warn_only=True)
    put("conf/pydaemon.service", "/etc/systemd/system/", use_sudo=True)
    sudo("systemctl enable pydaemon")
    sudo("systemctl daemon-reload")
    sudo("systemctl start pydaemon")
if __name__ == '__main__':
    # Direct invocation (outside fab) just prints the remote migration command.
    postgres_migrate_remote()

7
migratedb-dcoker.py Normal file
View file

@ -0,0 +1,7 @@
"""Server database migrate"""
import subprocess
dir ="/app/"
cmd = "cd {}; PYTHONPATH={}py PYSRV_CONFIG_PATH={} python3 scripts/dbmigrate.py".format(dir, dir, "/opt/mikrowizard/server-conf.json")
subprocess.Popen(cmd, shell=True)

7
migratedb.py Normal file
View file

@ -0,0 +1,7 @@
"""Server database migrate"""
import subprocess
dir ="/app/"
cmd = "cd {}; PYTHONPATH={}py PYSRV_CONFIG_PATH={} python3 scripts/dbmigrate.py".format(dir, dir, "/app/real-server-config.json")
subprocess.Popen(cmd, shell=True)

61
migrations/001_users.py Normal file
View file

@ -0,0 +1,61 @@
"""Peewee migrations -- 001_create.py.
Some examples:
> Model = migrator.orm['model_name'] # Return model in current state by name
> migrator.sql(sql) # Run custom SQL
> migrator.python(func, *args, **kwargs) # Run python code
> migrator.create_model(Model) # Create a model
> migrator.remove_model(model, cascade=True) # Remove a model
> migrator.add_fields(model, **fields) # Add fields to a model
> migrator.change_fields(model, **fields) # Change fields
> migrator.remove_fields(model, *field_names, cascade=True)
> migrator.rename_field(model, old_field_name, new_field_name)
> migrator.rename_table(model, new_table_name)
> migrator.add_index(model, *col_names, unique=False)
> migrator.drop_index(model, *col_names)
> migrator.add_not_null(model, *field_names)
> migrator.drop_not_null(model, *field_names)
> migrator.add_default(model, field_name, default)
"""
def migrate(migrator, database, fake=False, **kwargs):
    """Create the type_user_role enum and the users table."""
    # create extension manually - you must be a superuser to do this
    # is needed by uuid_generate_v4()
    # migrator.sql("""CREATE EXTENSION IF NOT EXISTS "uuid-ossp";""")
    migrator.sql("""CREATE TYPE type_user_role AS ENUM (
'disabled',
'admin',
'superuser',
'user')
""")
    migrator.sql("""CREATE TABLE users (
id uuid PRIMARY KEY NOT NULL DEFAULT uuid_generate_v4(),
username text UNIQUE,
password text,
first_name text,
last_name text,
role type_user_role DEFAULT 'user',
tags text[],
hash text DEFAULT Null,
email text DEFAULT Null,
adminperms text DEFAULT Null,
created timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
modified timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP
)""")
    # normal integer-id: id serial PRIMARY KEY NOT NULL,

def rollback(migrator, database, fake=False, **kwargs):
    """Rollback for 001.

    NOTE(review): intentionally empty?  Neither the users table nor the
    type_user_role enum created above is dropped here — confirm.
    """

40
migrations/002_devices.py Normal file
View file

@ -0,0 +1,40 @@
# 002_devices.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the devices table — one row per managed router.

    NOTE(review): 'update_availble'/'upgrade_availble' are misspelled; the
    names are presumably referenced by the ORM models, so do not rename here.
    """
    migrator.sql("""CREATE TABLE devices(
id serial PRIMARY KEY NOT NULL,
name text,
ip text,
mac text UNIQUE,
details text,
uptime text,
license text,
interface text,
user_name text,
password text,
port text,
update_availble boolean,
current_firmware text,
arch text,
upgrade_availble boolean,
sensors text,
router_type text,
wifi_config text,
peer_ip text,
failed_attempt int DEFAULT 0,
syslog_configured boolean,
status text NOT NULL DEFAULT 'done',
firmware_to_install text,
owner uuid REFERENCES users(id),
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the devices table."""
    migrator.sql("""DROP TABLE devices""")

View file

@ -0,0 +1,20 @@
# 003_sysconfig.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the sysconfig key/value settings table."""
    migrator.sql("""CREATE TABLE sysconfig(
id serial PRIMARY KEY NOT NULL,
key text UNIQUE,
value text,
created_by uuid REFERENCES users(id),
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the sysconfig table."""
    migrator.sql("""DROP TABLE sysconfig""")

View file

@ -0,0 +1,21 @@
# 004_device_groups.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the device_groups table."""
    migrator.sql("""CREATE TABLE device_groups(
id serial PRIMARY KEY NOT NULL,
name text,
owner uuid REFERENCES users(id),
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the device_groups table."""
    migrator.sql("""DROP TABLE device_groups""")

View file

@ -0,0 +1,22 @@
# 005_device_groups_devices_rel.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the device_groups <-> devices many-to-many link table."""
    migrator.sql("""CREATE TABLE device_groups_devices_rel(
id serial PRIMARY KEY NOT NULL,
group_id serial REFERENCES device_groups(id),
device_id serial REFERENCES devices(id),
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP,
UNIQUE(group_id, device_id)
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the device_groups_devices_rel table."""
    migrator.sql("""DROP TABLE device_groups_devices_rel""")

22
migrations/006_tasks.py Normal file
View file

@ -0,0 +1,22 @@
# 006_tasks.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the tasks table (background job bookkeeping)."""
    migrator.sql("""CREATE TABLE tasks(
id serial PRIMARY KEY NOT NULL,
signal int UNIQUE,
name text,
starttime timestamp not null default CURRENT_TIMESTAMP,
endtime timestamp not null default CURRENT_TIMESTAMP,
status boolean
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the tasks table."""
    migrator.sql("""DROP TABLE tasks""")

25
migrations/007_events.py Normal file
View file

@ -0,0 +1,25 @@
# 007_events.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the events table (per-device event log)."""
    migrator.sql("""CREATE TABLE events(
id bigserial PRIMARY KEY NOT NULL,
devid bigint REFERENCES devices(id),
eventtype text,
comment text,
status boolean,
detail text,
level text,
src text,
fixtime timestamp null default null,
eventtime timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the events table."""
    migrator.sql("""DROP TABLE events""")

20
migrations/008_backups.py Normal file
View file

@ -0,0 +1,20 @@
# 008_backups.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the backups table (stored device backup files)."""
    migrator.sql("""CREATE TABLE backups(
id serial PRIMARY KEY NOT NULL,
devid bigint REFERENCES devices(id),
dir text,
filesize int,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the backups table."""
    migrator.sql("""DROP TABLE backups""")

View file

@ -0,0 +1,29 @@
# 014_authorization.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the type_auth enum and the auth session-log table."""
    migrator.sql("""CREATE TYPE type_auth AS ENUM (
'loggedin',
'loggedout',
'failed')
""")
    migrator.sql("""CREATE TABLE auth(
id serial PRIMARY KEY NOT NULL,
devid bigint REFERENCES devices(id),
ltype type_auth,
ip text,
by text,
username text,
started bigint DEFAULT 0,
ended bigint DEFAULT 0,
sessionid text DEFAULT Null,
message text DEFAULT Null,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the auth table.

    NOTE(review): the type_auth enum created above is not dropped — confirm.
    """
    migrator.sql("""DROP TABLE auth""")

23
migrations/010_account.py Normal file
View file

@ -0,0 +1,23 @@
# 010_account.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the account table (per-device accounting/config actions)."""
    migrator.sql("""CREATE TABLE account(
id serial PRIMARY KEY NOT NULL,
devid bigint REFERENCES devices(id),
message text,
action text,
section text,
username text,
config text,
address text,
ctype text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the account table."""
    migrator.sql("""DROP TABLE account""")

View file

@ -0,0 +1,25 @@
# 021_user_tasks.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the user_tasks table (user-scheduled/cron tasks)."""
    migrator.sql("""CREATE TABLE user_tasks(
id serial PRIMARY KEY NOT NULL,
name text,
description text,
dev_ids text,
snippetid int,
data text,
cron text,
action text,
task_type text,
selection_type text,
desc_cron text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the user_tasks table."""
    migrator.sql("""DROP TABLE user_tasks""")

View file

@ -0,0 +1,18 @@
# 023_snippets.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the snippets table (reusable script snippets)."""
    migrator.sql("""CREATE TABLE snippets(
id serial PRIMARY KEY NOT NULL,
name text,
description text,
content text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the snippets table."""
    migrator.sql("""DROP TABLE snippets""")

View file

@ -0,0 +1,18 @@
# 027_permissions.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the permissions table (named permission sets)."""
    migrator.sql("""CREATE TABLE permissions(
id serial PRIMARY KEY NOT NULL,
name text,
perms text,
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the permissions table."""
    migrator.sql("""DROP TABLE permissions""")

View file

@ -0,0 +1,19 @@
# 029_user_group_perm.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the user <-> device-group <-> permission link table."""
    migrator.sql("""CREATE TABLE user_group_perm_rel(
id serial PRIMARY KEY NOT NULL,
group_id serial REFERENCES device_groups(id),
user_id uuid REFERENCES users(id),
perm_id serial REFERENCES permissions(id),
UNIQUE(group_id, user_id)
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the user_group_perm_rel table."""
    migrator.sql("""DROP TABLE user_group_perm_rel""")

View file

@ -0,0 +1,20 @@
# 030_firmware.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the firmware table (downloaded firmware images, unique per version+arch)."""
    migrator.sql("""CREATE TABLE firmware(
id serial PRIMARY KEY NOT NULL,
version text NOT NULL,
location text NOT NULL,
architecture text NOT NULL,
sha256 text NOT NULL,
created timestamp not null default CURRENT_TIMESTAMP,
UNIQUE(version, architecture)
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the firmware table."""
    migrator.sql("""DROP TABLE firmware""")

View file

@ -0,0 +1,16 @@
# 032_task_group_dev_rel.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the user_tasks <-> (device_groups | devices) link table.

    group_id/device_id are nullable: a task row targets either a group or a
    single device.
    """
    migrator.sql("""CREATE TABLE task_group_dev_rel(
id serial PRIMARY KEY NOT NULL,
utask_id serial REFERENCES user_tasks(id) ,
group_id bigint NULL REFERENCES device_groups(id) default null,
device_id bigint NULL REFERENCES devices(id) default null,
UNIQUE(utask_id, group_id , device_id)
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the task_group_dev_rel table."""
    migrator.sql("""DROP TABLE task_group_dev_rel""")

View file

@ -0,0 +1,17 @@
# 027_permissions.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the task_results table (output of executed tasks)."""
    migrator.sql("""CREATE TABLE task_results(
id serial PRIMARY KEY NOT NULL,
task_type text,
result text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the task_results table."""
    migrator.sql("""DROP TABLE task_results""")

20
migrations/018_syslogs.py Normal file
View file

@ -0,0 +1,20 @@
# 018_syslogs.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the syslogs table (audit log of user actions)."""
    migrator.sql("""CREATE TABLE syslogs(
id serial PRIMARY KEY NOT NULL,
user_id uuid REFERENCES users(id),
action text,
section text,
data text,
ip text,
agent text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the syslogs table."""
    migrator.sql("""DROP TABLE syslogs""")

View file

@ -0,0 +1,17 @@
# 038_device_radio.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the device_radio table (wireless peer links between devices)."""
    migrator.sql("""CREATE TABLE device_radio(
id serial PRIMARY KEY NOT NULL,
devid bigint REFERENCES devices(id),
peer_dev_id bigint REFERENCES devices(id),
data text,
external_id text,
mac text,
created timestamp not null default CURRENT_TIMESTAMP
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the device_radio table."""
    migrator.sql("""DROP TABLE device_radio""")

View file

@ -0,0 +1,23 @@
# 001_init.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the users table (SQLite variant: INTEGER id, no enum)."""
    migrator.sql("""CREATE TABLE users (
id INTEGER PRIMARY KEY,
created timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
modified timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
email text UNIQUE,
password text,
first_name text,
last_name text,
role text DEFAULT 'readonly',
tags text
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Rollback for 001.

    NOTE(review): intentionally empty?  The users table created above is not
    dropped here — confirm.
    """

View file

@ -0,0 +1,20 @@
# 002_movies.py
def migrate(migrator, database, fake=False, **kwargs):
    """Create the movies example table (SQLite demo schema)."""
    migrator.sql("""CREATE TABLE movies(
id INTEGER PRIMARY KEY,
created timestamp not null default CURRENT_TIMESTAMP,
modified timestamp not null default CURRENT_TIMESTAMP,
creator integer REFERENCES users(id),
title text,
director text
)""")

def rollback(migrator, database, fake=False, **kwargs):
    """Drop the movies table."""
    migrator.sql("""DROP TABLE movies""")

1
py/_version.py Normal file
View file

@ -0,0 +1 @@
__version__ = "1.0.0"

438
py/api/api_account.py Normal file
View file

@ -0,0 +1,438 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_account.py: API For managing accounts and permissions
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from ctypes import util
from flask import request, session, g, jsonify
from libs.util import ISPRO
from libs.db import db,db_permissions,db_user_group_perm,db_groups,db_sysconfig,db_syslog
import json
from libs import webutil,account
from libs.webutil import app, login_required, get_myself , buildResponse
from libs.mschap3.mschap import nt_password_hash
import logging
log = logging.getLogger("api")
@app.route('/api/login', methods = ['POST'])
def login():
    """Logs the user in with username+password.

    On success returns the user object,
    on error returns error.
    """
    input = request.json or {}
    username = input.get('username')
    password = input.get('password')

    if not username or not password:
        return webutil.warn_reply("Missing input")

    u = db.get_user_by_username(username)
    if not u or not account.check_password(u.password, password):
        # Failed login: record the attempt for auditing, but never let the
        # audit write break the error reply.  Fixed: the original always
        # dereferenced u.id here — an AttributeError when the username is
        # unknown (u is None) — masked by a bare "except: pass".
        if u:
            try:
                db_syslog.add_syslog_event(u.id, "User login","Failed login",webutil.get_ip(),webutil.get_agent(),json.dumps({"username":username}))
            except Exception:
                log.exception("could not record failed-login syslog event")
        return webutil.warn_reply("Invalid login credentials")

    # success: establish the session ('remember' defaults to a permanent one)
    account.build_session(u, is_permanent=input.get('remember', True))
    tz = db_sysconfig.get_sysconfig('timezone')
    # log.info("LOGIN OK agent={}".format(webutil.get_agent()))
    res = {
        "username": u.username,
        "name": u.username,
        "partner_id": u.id,
        "uid": u.id,
        "first_name": u.first_name,
        "last_name": u.last_name,
        "role": u.role,
        "tags": u.tags,
        "tz": tz,
        "perms": json.loads(u.adminperms)
    }
    db_syslog.add_syslog_event(u.id, "User login","Successful login",webutil.get_ip(),webutil.get_agent(),json.dumps({"username":username}))
    return buildResponse(res, 200)
@app.route('/api/user/create', methods = ['POST'])
@login_required(role='admin',perm={'users':'write'})
def create_user():
    """Create a new user from the JSON body and attach the requested
    per-group permissions; returns the created user or a failure dict."""
    input = request.json or {}
    username = input.get('username')
    passwd = input.get('password')
    email = input.get('email')
    fname = input.get('first_name')
    lname = input.get('last_name')
    role = input.get('role', 'user')
    company = input.get('company')
    adminperms = input.get('adminperms',[])
    userperms = input.get('userperms',[])
    # username/password/first/last/role are mandatory; email, company optional.
    if not username or not passwd or not fname or not lname or not role:
        resp={"status":"failed","err":"invalid data"}
        return buildResponse(resp, 200)
    # Reject duplicate usernames.
    u = db.get_user_by_username(username)
    if u:
        msg = "User Name already Taken: {}".format(username)
        resp={"status":"failed","err":msg}
        return buildResponse(resp, 200)
    err = account.check_password_validity(passwd)
    if err:
        err = "Invalid password : {}".format(err)
        resp={"status":"failed","err":err}
        return buildResponse(resp, 200)
    newpass = account.hash_password(passwd)
    # Hex-encoded NT hash of the plaintext password (for MSCHAP auth).
    nthashhex=''.join(list("{:02x}".format(ord(c)) for c in nt_password_hash(passwd)))
    # create new user
    u = db.User()
    u.username = username
    u.company = company
    u.first_name = fname
    u.last_name = lname
    u.password = newpass
    u.email= email
    u.adminperms= json.dumps(adminperms)
    u.hash = nthashhex
    u.tags = []
    u.role = role # set default to what makes sense to your app
    u.save(force_insert=True)
    account.new_signup_steps(u)
    # Link the user to each requested (group, permission) pair.
    for perm in userperms:
        db_user_group_perm.DevUserGroupPermRel.create_user_group_perm(u.id, int(perm['group_id']), int(perm['perm_id']))
    db_syslog.add_syslog_event(webutil.get_myself(), "User Managment","Create", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse(u, 200)
@app.route('/api/user/delete' ,methods=['POST'])
@login_required(role='admin', perm={'users':'full'})
def user_delete():
    """Delete a user (and all dependent rows) by uid; admins with
    'full' users permission only."""
    input = request.json or {}
    uid = input.get('uid')
    try:
        target = db.get_user(uid)
    except:
        target = False
    if not target:
        failure = {"status":"failed","err":"User not found: {}".format(uid)}
        return buildResponse(failure, 200)
    # recursive=True removes rows referencing this user as well.
    target.delete_instance(recursive=True)
    db_syslog.add_syslog_event(webutil.get_myself(), "User Managment", "Delete", webutil.get_ip(), webutil.get_agent(), json.dumps(input))
    return buildResponse({}, 200)
@app.route('/api/user/change_password' ,methods=['POST'])
@login_required
def user_change_password():
    """Change the calling user's own password.

    Verifies the current password, validates the new one, then stores
    both the login hash and the NT (MSCHAP) hash of the new password.
    """
    input = request.json or {}
    uid = webutil.get_myself().id
    oldpass = input.get('oldpass')
    newpass = input.get('newpass')
    # Verify the current password first.
    try:
        u = db.get_user(uid)
    except Exception:
        u = False
    if not u or not account.check_password(u.password, oldpass):
        resp = {"status": "failed", "err": "Current password is incorrect"}
        return buildResponse(resp, 200)
    err = account.check_password_validity(newpass)
    if err:
        resp = {"status": "failed", "err": "Invalid password : {}".format(err)}
        return buildResponse(resp, 200)
    # BUGFIX: derive the NT hash from the *plaintext* new password before
    # replacing it with the login hash. The original reassigned `newpass`
    # to account.hash_password(newpass) first and then NT-hashed that
    # digest, producing an NT hash that can never match the password
    # (create_user and user_edit both hash the plaintext).
    nthashhex = ''.join("{:02x}".format(ord(c)) for c in nt_password_hash(newpass))
    u.password = account.hash_password(newpass)
    u.hash = nthashhex
    u.save()
    db_syslog.add_syslog_event(webutil.get_myself(), "User Managment", "Change Password", webutil.get_ip(), webutil.get_agent(), json.dumps(input))
    return buildResponse({"status": "success"}, 200)
@app.route('/api/logout', methods = ['POST'])
@login_required
def logout():
    """Log out: record the event while the session still exists, then clear it."""
    event_payload = json.dumps({'logout':True})
    db_syslog.add_syslog_event(webutil.get_myself(), "User Logout","User Logged out", webutil.get_ip(),webutil.get_agent(),event_payload)
    session.clear()
    return jsonify({}), 200
@app.route('/api/me', methods=['GET', 'POST'])
def me():
    """Describe the calling user; anonymous callers get a guest profile."""
    current = get_myself()
    if not current:
        guest = {"username":"public","first_name":"guest","last_name":"guest","role":"admin"}
        return buildResponse(guest, 200)
    profile = {
        "username": current.username,
        "first_name": current.first_name,
        "last_name": current.last_name,
        "role": current.role,
        "tags": current.tags,
        "uid": current.id,
        "perms": json.loads(current.adminperms),
        "tz": db_sysconfig.get_sysconfig('timezone'),
        "ISPRO": ISPRO,
    }
    return buildResponse(profile, 200)
@app.route('/api/user/edit', methods = ['POST'])
@login_required(role='admin',perm={'users':'write'})
def user_edit():
    """Edit user info. Only for admins with write perm.

    Fields present (and truthy) in the JSON body are updated; others are
    left untouched. Password changes also refresh the NT hash."""
    err=False
    input = request.json or {}
    uid = input.get('id')
    username = input.get('username')
    passwd = input.get('password')
    email = input.get('email')
    fname = input.get('first_name')
    lname = input.get('last_name')
    role = input.get('role', 'user')
    adminperms = input.get('adminperms',[])
    if passwd:
        err = account.check_password_validity(passwd)
        if not err:
            newpass = account.hash_password(passwd)
            # Hex NT hash of the plaintext password (MSCHAP support).
            nthashhex=''.join(list("{:02x}".format(ord(c)) for c in nt_password_hash(passwd)))
        else:
            err = "Invalid password : {}".format(err)
            resp={"status":"failed","err":err}
            return buildResponse(resp, 200)
    try:
        u = db.get_user(uid)
    except:
        u=False
    if not u:
        msg = "User not found: {}".format(uid)
        resp={"status":"failed","err":msg}
        return buildResponse(resp, 200)
    # Prevent renaming onto an existing username (other than this user's own).
    ucheck = db.get_user_by_username(username)
    if ucheck and str(ucheck.id) != uid:
        msg = "User Name already Taken: {}".format(username)
        resp={"status":"failed","err":msg}
        return buildResponse(resp, 200)
    if username:
        u.username = username
    if fname:
        u.first_name = fname
    if lname:
        u.last_name = lname
    if role:
        u.role = role
    # NOTE(review): the hard-coded UUID appears to shield one built-in
    # account from admin-permission changes — confirm and document why.
    if adminperms and str(u.id) != "37cc36e0-afec-4545-9219-94655805868b":
        u.adminperms= json.dumps(adminperms)
    if email:
        u.email= email
    if passwd and passwd!="":
        u.password = newpass
        u.hash = nthashhex
    u.save()
    resp={"status":"success"}
    if err:
        resp={"status":"failed","err":err}
    db_syslog.add_syslog_event(webutil.get_myself(), "User Managment","Edit", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse(resp, 200)
@app.route('/api/users/list' ,methods=['POST'])
@login_required(role='admin',perm={'users':'read'})
def users():
    """Paged/filtered user listing (parameters come from the query string)."""
    args = request.args or {}
    reply = list(db.query_users(args.get('page'), args.get('size'), args.get('search')))
    return buildResponse(reply, 200)
@app.route('/api/perms/list' ,methods=['POST'])
@login_required(role='admin',perm={'permissions':'read'})
def perms():
    """Paged/filtered permission listing; decodes each stored perms JSON."""
    args = request.args or {}
    rows = db_permissions.query_perms(args.get('page'), args.get('size'), args.get('search')).dicts()
    for row in rows:
        row["perms"] = json.loads(row["perms"])
    return buildResponse(rows, 200)
@app.route('/api/perms/create' ,methods=['POST'])
@login_required(role='admin',perm={'permissions':'write'})
def perms_create():
    """Create a named permission record from a list of router rights."""
    input = request.json or {}
    name = input.get('name')
    perms = input.get('perms')
    # Reject duplicate names and the reserved default Mikrotik group names.
    existing = db_permissions.get_perm_by_name(name)
    if existing or name.lower() in ['full','read','write']:
        return buildResponse({"status":"failed","err":"Permission with same name already exists"}, 200)
    # Whitelist of recognised RouterOS policy names.
    allowed = ["api","ftp","password","read","romon","sniff","telnet","tikapp","winbox","dude",'rest-api',"local","policy","reboot","sensitive","ssh","test","web","write"]
    for right in perms:
        if right not in allowed:
            return buildResponse({"status":"failed", "err":"Invalid permission"}, 200)
    db_permissions.create_perm(name, json.dumps(perms))
    db_syslog.add_syslog_event(webutil.get_myself(), "Perms Managment","Create", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse({}, 200)
@app.route('/api/perms/edit' ,methods=['POST'])
@login_required(role='admin',perm={'permissions':'write'})
def perms_edit():
    """Edit a permission record: rename it and/or replace its rights list.

    Default Mikrotik group names (full/read/write) may be neither used
    as the new name nor edited."""
    input = request.json or {}
    name = input.get('name')
    perms = input.get('perms')
    id = input.get('id')
    # The record must exist before anything else.
    perm = db_permissions.get_perm(id)
    if not perm:
        return buildResponse({"status":"failed", "err":"Permission not exists"}, 200)
    # Whitelist of recognised RouterOS policy names.
    for per in perms:
        if per not in ["api","ftp","password","read","romon","sniff","telnet","tikapp","winbox","dude","rest-api","local","policy","reboot","sensitive","ssh","test","web","write"]:
            return buildResponse({"status":"failed", "err":"Invalid permission"}, 200)
    perms=json.dumps(perms)
    #we are not allowed to change default mikrotik groups name
    if name.lower() in ['full','read','write']:
        return buildResponse({"status":"failed", "err":"Invalid permission name"}, 200)
    if perm.name.lower() in ['full','read','write']:
        return buildResponse({"status":"failed", "err":"Invalid permission name"}, 200)
    perm.name=name
    perm.perms=perms
    perm.save()
    db_syslog.add_syslog_event(webutil.get_myself(), "Perms Managment","Edit", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse({'status':'success'}, 200)
@app.route('/api/userperms/list' ,methods=['POST'])
@login_required(role='admin',perm={'users':'read'})
def userperms():
    """List one user's (group, permission) assignments."""
    input = request.json or {}
    uid = input.get('uid')
    # The user must exist before relations are looked up.
    user = db.get_user(uid)
    if not user:
        return buildResponse({"status":"failed", "err":"User not exists"}, 200)
    rels = db_user_group_perm.DevUserGroupPermRel.get_user_group_perms(uid)
    res = [
        {
            "id": rel.id,
            "user_id": rel.user_id.id,
            "group_id": rel.group_id.id,
            "group_name": rel.group_id.name,
            "perm_id": rel.perm_id.id,
            "perm_name": rel.perm_id.name,
        }
        for rel in rels
    ]
    return buildResponse(res, 200)
@app.route('/api/userperms/create' ,methods=['POST'])
@login_required(role='admin',perm={'users':'write'})
def userperms_create():
    """Grant a user a permission on a device group."""
    input = request.json or {}
    uid = input.get('uid')
    gid = input.get('gid')
    pid = input.get('pid')
    # Validate each foreign key before creating the relation.
    if not db.get_user(uid):
        return buildResponse({"status":"failed", "err":"User not exists"}, 200)
    if not db_groups.get_group(gid):
        return buildResponse({"status":"failed", "err":"Group not exists"}, 200)
    if not db_permissions.get_perm(pid):
        return buildResponse({"status":"failed", "err":"Permission not exists"}, 200)
    db_user_group_perm.DevUserGroupPermRel.create_user_group_perm(uid, gid, pid)
    db_syslog.add_syslog_event(webutil.get_myself(), "UserPerms Managment","Create", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse({'status':'success'}, 200)
@app.route('/api/userperms/delete' ,methods=['POST'])
@login_required(role='admin', perm={'users':'write'})
def userperms_delete():
    """Revoke one user/group/permission relation by its id."""
    input = request.json or {}
    rel_id = input.get('id')
    # Relation #1 is the built-in admin assignment and must survive.
    if rel_id in ('1', 1):
        return buildResponse({"status":"failed", "err":"Cannot delete admin permission"}, 200)
    if not db_user_group_perm.DevUserGroupPermRel.get_user_group_perm(rel_id):
        return buildResponse({"status":"failed", "err":"Permission not exists"}, 200)
    db_user_group_perm.DevUserGroupPermRel.delete_user_group_perm(rel_id)
    db_syslog.add_syslog_event(webutil.get_myself(), "UserPerms Managment", "Delete", webutil.get_ip(), webutil.get_agent(), json.dumps(input))
    return buildResponse({'status':'success'}, 200)
@app.route('/api/perms/delete' ,methods=['POST'])
@login_required(role='admin', perm={'permissions':'full'})
def perms_delete():
    """Delete a permission record (admin + permissions:full only).

    Default Mikrotik permissions (full/read/write) cannot be removed.
    """
    input = request.json or {}
    id = input.get('id')
    perm = db_permissions.get_perm(id)
    # BUGFIX: check for a missing record *before* reading perm.name — the
    # original dereferenced perm.name first, raising for unknown ids.
    if not perm:
        return buildResponse({"status":"failed", "err":"Permission not exists"}, 200)
    if perm.name in ['full','read','write']:
        return buildResponse({"status":"failed", "err":"Cannot delete default permission"}, 200)
    res = db_permissions.delete_perm(id)
    if not res:
        return buildResponse({"status":"failed", "err":"Unable to Delete Permission"}, 200)
    db_syslog.add_syslog_event(webutil.get_myself(), "Perms Managment","Delete", webutil.get_ip(),webutil.get_agent(),json.dumps(input))
    return buildResponse({'status':'success'}, 200)

83
py/api/api_backups.py Normal file
View file

@ -0,0 +1,83 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_backups.py: API for managing backups
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request, jsonify
from libs.db import db_tasks,db_backups,db_device,db_syslog
from libs import util
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
import bgtasks
import logging
import json
log = logging.getLogger("api.firmware")
@app.route('/api/backup/make', methods = ['POST'])
@login_required(role='admin',perm={'backup':'write'})
def backup_create():
    """Kick off a background backup job for selected devices.

    devids == "0" means all devices; otherwise it is a list of device
    ids. Refuses to start while a previous backup job is running."""
    input = request.json
    devids=input.get('devids',False)
    # Truthy while a backup job is already in flight.
    status=db_tasks.backup_job_status().status
    if not status:
        db_syslog.add_syslog_event(get_myself(), "Backup Managment","Create", get_ip(),get_agent(),json.dumps(input))
        if devids=="0":
            all_devices=list(db_device.get_all_device())
            bgtasks.backup_devices(devices=all_devices)
        else:
            devices=db_device.get_devices_by_id(devids)
            bgtasks.backup_devices(devices=devices)
        # Either way the caller gets the pre-start status flag back.
        return buildResponse([{'status': status}],200)
    else:
        return buildResponse([{'status': status}],200)
@app.route('/api/backup/list', methods = ['POST'])
@login_required(role='admin',perm={'backup':'read'})
def backup_list():
    """Paged/filtered list of backup records.

    Each row carries id, human-readable size, creation time, and the
    owning device's name/ip/mac — placeholder values when the device
    record has since been deleted.
    """
    input = request.json
    page = input.get('page')
    devid = input.get('devid',False)
    size = input.get('size')
    search = input.get('search')
    backups = db_backups.query_backup_jobs(page, size, search, devid=devid)
    reply = []
    for back in backups:
        # Shared fields were duplicated across both branches before;
        # build them once and only vary the device part.
        data = {
            'id': back.id,
            'filesize': util.sizeof_fmt(back.filesize),
            'created': back.created,
        }
        if back.devid:
            dev = back.devid
            data['devname'] = dev.name
            data['devip'] = dev.ip
            data['devmac'] = dev.mac
        else:
            # The backup outlived its device record.
            data['devname'] = 'Deleted Device'
            data['devip'] = ''
            data['devmac'] = ''
        reply.append(data)
    return buildResponse(reply, 200)
@app.route('/api/backup/get', methods = ['POST'])
@login_required(role='admin',perm={'backup':'read'})
def backup_get():
    """Return the raw text content of one backup file by record id.

    NOTE(review): assumes the record exists and its file is readable —
    an unknown id or missing file raises here; confirm callers expect that."""
    input = request.json
    id=input.get('id')
    back=db_backups.get_backup(id)
    # `dir` holds the on-disk path of the backup file.
    path=back.dir
    with open(path, 'r') as file:
        file_content = file.read()
    return buildResponse({"content":file_content}, 200)
@app.route('/api/backup/status', methods = ['POST'])
@login_required(role='admin',perm={'backup':'read'})
def backup_status():
    """Report whether a background task is currently running.

    NOTE(review): this reads update_check_status(), while backup_create
    gates on backup_job_status() — looks like a copy/paste slip; confirm
    which task flag the backup UI actually expects here."""
    status=db_tasks.update_check_status().status
    return jsonify({'status': status})

559
py/api/api_dev.py Normal file
View file

@ -0,0 +1,559 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_dev.py: API for managing devices and device groups
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request,redirect ,session
import datetime
import html
import config
import re
from libs.red import RedisDB
from libs.webutil import app,buildResponse,login_required,get_myself,get_ip,get_agent
from libs import util
from libs.db import db_device,db_groups,db_user_group_perm,db_user_tasks,db_sysconfig,db_syslog
import logging
import json
from playhouse.shortcuts import model_to_dict
log = logging.getLogger("api")
# Optional "pro" add-on: if libs.utilpro is importable, ISPRO enables the
# extra behavior guarded below (e.g. radio detection in dev_info).
try:
    from libs import utilpro
    ISPRO=True
except ImportError:
    ISPRO=False
    pass
@app.route('/', methods = ['GET'])
def index():
    """Root URL: redirect to the API listing in development; hidden in production."""
    if not config.IS_PRODUCTION:
        return redirect('/api/list')
    return "not available", 400
@app.route('/api/dev/list', methods = ['POST'])
@login_required(role='admin',perm={'device':'read'})
def list_devs():
    """Return the devices the current user can access within a group.

    Body: group_id (int, required), page/size (optional, default page 0
    / 1000 rows). Credentials are stripped from every row.
    NOTE(review): `search` is read but never applied — confirm intent."""
    input = request.json
    group_id = int(input.get('group_id', False))
    page = input.get('page')
    size = input.get('size')
    search = input.get('search',False)
    page = int(page or 0)
    limit = int(size or 1000)
    res = []
    try:
        # Resolve the calling user from the session.
        uid = session.get("userid") or False
        if not uid:
            return buildResponse({'result':'failed','err':"No User"}, 200)
        # Devices this user may see, restricted to the requested group.
        devs=db_user_group_perm.DevUserGroupPermRel.get_user_devices(uid,group_id).paginate(page, limit).dicts()
        for dev in devs:
            temp=dev
            # Never leak stored router credentials to the client.
            del temp['user_name']
            del temp['password']
            # Expand compact RouterOS uptime ("1w2d3h4m") for display.
            if ' ' not in temp['uptime']:
                temp['uptime'] = temp['uptime'].replace('w',' week ').replace('d',' day ').replace('h',' hour ').replace('m',' min ')
            res.append(temp)
    except Exception as e:
        return buildResponse({'result':'failed','err':str(e)},200)
    return buildResponse(res,200)
@app.route('/api/dev/get_editform', methods = ['POST'])
@login_required(role='admin',perm={'device':'full'})
def get_editform():
    """Return one device's editable fields, with credentials decrypted
    for display in the edit form (device:full only)."""
    input = request.json
    devid = int(input.get('devid', False))
    res = {}
    try:
        dev=db_device.get_device(devid)
        if not dev:
            return buildResponse({'status': 'failed'}, 200, error="Wrong Data")
        # Stored credentials are encrypted at rest.
        res['user_name']=util.decrypt_data(dev['user_name'])
        res['password']=util.decrypt_data(dev['password'])
        res['ip']=dev['ip']
        res['peer_ip']=dev['peer_ip']
        res['name']=dev['name']
        res['id']=dev['id']
        try:
            # Known local addresses, offered as choices in the form;
            # best-effort: an unreadable config just yields an empty list.
            res['ips']=json.loads(db_sysconfig.get_sysconfig('all_ip'))
        except Exception as e:
            res['ips']=[]
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed'}, 200, error="Wrong Data")
    return buildResponse(res,200)
@app.route('/api/dev/save_editform', methods = ['POST'])
@login_required(role='admin', perm={'device':'full'})
def save_editform():
    """Persist the device edit form; credentials are re-encrypted at rest."""
    input = request.json
    devid = int(input.get('id', False))
    user_name = input.get('user_name', False)
    password = input.get('password', False)
    ip = input.get('ip', False)
    peer_ip = input.get('peer_ip', False)
    name = input.get('name', False)
    try:
        if db_device.update_device(devid, util.crypt_data(user_name), util.crypt_data(password), ip, peer_ip, name):
            # Only log the change after a successful update.
            db_syslog.add_syslog_event(get_myself(), "Device", "Edit", get_ip(),get_agent(),json.dumps(input))
            return buildResponse({"result":"success"}, 200)
        else:
            return buildResponse({"result":"failed","err":"Unable to update device"}, 200)
    except Exception as e:
        log.error(e)
        return buildResponse({"result":"failed","err":str(e)}, 200)
@app.route('/api/devgroup/list', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'read'})
def list_devgroups():
    """List all device groups."""
    try:
        groups = list(db_groups.query_groups_api())
    except Exception as e:
        return buildResponse({'result':'failed','err':str(e)},200)
    return buildResponse(groups,200)
@app.route('/api/devgroup/delete', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'full'})
def delete_group():
    """Delete one device group by id."""
    input = request.json
    gid = input.get('gid', False)
    try:
        deleted = db_user_group_perm.DevUserGroupPermRel.delete_group(gid)
    except Exception:
        deleted = False
    if not deleted:
        return buildResponse({"result":"failed",'err':'Unable to delete'}, 200)
    # Log only after a successful deletion.
    db_syslog.add_syslog_event(get_myself(), "Device Group","Delete", get_ip(),get_agent(),json.dumps(input))
    return buildResponse({"result":"success"}, 200)
@app.route('/api/devgroup/members', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'read','device':'read'})
def list_devgroups_members():
    """List the devices belonging to one device group."""
    input = request.json
    gid = input.get('gid',False)
    try:
        members = list(db_groups.devs(gid))
    except Exception as e:
        return buildResponse({'result':'failed','err':str(e)},200)
    return buildResponse(members,200)
@app.route('/api/devgroup/update_save_group', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'write','device':'read'})
def update_save_group():
    """Create (id == 0) or update a device group and sync its members.

    Body: id, name, array_agg = the full list of member device ids; on
    update, devices missing from that list are removed from the group."""
    input = request.json
    devids= input.get('array_agg', False)
    name = input.get('name', False)
    id = input.get('id', False)
    # First check if we are editing or creating a new group:
    # id == 0 means create.
    if id==0:
        # create new group and add devices to it
        try:
            group=db_groups.create_group(name)
            if group:
                db_syslog.add_syslog_event(get_myself(), "Device Group","Create", get_ip(),get_agent(),json.dumps(input))
                gid=group.id
                db_groups.add_devices_to_group(gid,devids)
            else:
                return buildResponse({'result':'failed','err':"Group not created"}, 200)
            return buildResponse({"result":"success"}, 200)
        except Exception as e:
            return buildResponse({'result':'failed','err':str(e)}, 200)
    else:
        # update group and add devices to it
        try:
            group=db_groups.update_group(id, name)
            db_groups.add_devices_to_group(group.id, devids)
            # Get all dev ids from the group, compare to devids, and
            # remove devices not available in devids (membership sync).
            devs=db_groups.devs2(id)
            ids=[]
            for dev in devs:
                ids.append(dev.id)
            dev_to_remove=list(set(ids)-set(devids))
            db_groups.delete_from_group(dev_to_remove)
            db_syslog.add_syslog_event(get_myself(), "Device Group","Update", get_ip(),get_agent(),json.dumps(input))
            return buildResponse({"result":"success"}, 200)
        except Exception as e:
            return buildResponse({'result':'failed','err':str(e)}, 200)
@app.route('/api/search/groups', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'read','device':'read'})
def search_groups():
    """Find device groups whose name contains the search string; with no
    string, return the first 10 alphabetically."""
    input = request.json
    needle = input.get('searchstr',False)
    model = db_groups.DevGroups
    try:
        if needle and needle != "":
            # Substring match on the group name.
            query = model.select().where(model.name.contains(needle))
        else:
            # Default listing: first 10 by name.
            query = model.select().order_by(model.name).limit(10)
        found = query.dicts()
    except Exception as e:
        return buildResponse({'result':'failed','err':str(e)},200)
    return buildResponse(found,200)
@app.route('/api/search/devices', methods = ['POST'])
@login_required(role='admin',perm={'device':'read'})
def search_devices():
    """Find devices whose name contains the search string; with no
    string, return the first 10 devices alphabetically.

    (Docstring previously said "search in groups" — this endpoint
    searches devices.)
    """
    input = request.json
    device = db_device.Devices
    # The original read `searchstr` twice; once is enough.
    searchstr = input.get('searchstr', False)
    devs = []
    try:
        if searchstr and searchstr != "":
            # Substring match on the device name.
            devs = (device
                    .select()
                    .where(device.name.contains(searchstr))
                    .dicts())
        else:
            # Default listing: first 10 by name.
            devs = (device
                    .select()
                    .order_by(device.name)
                    .limit(10)
                    .dicts())
    except Exception as e:
        return buildResponse({'result':'failed','err':str(e)},200)
    return buildResponse(devs,200)
@app.route('/api/taskmember/details', methods = ['POST'])
@login_required(role='admin',perm={'device_group':'read','device':'read'})
def get_taskmember_details():
    """Return the member list of a user task: group id/name pairs when
    the task targets groups, otherwise device id/name/mac triples."""
    input = request.json
    tid=input.get('taskid',False)
    if not tid:
        return buildResponse({"success":'failed',"err":"Wrong task"},200)
    res=[]
    utask=db_user_tasks.UserTasks.get_utask_by_id(tid)
    members=db_user_tasks.get_task_devices(utask,False)
    if utask.selection_type=="groups":
        for group in members:
            tmp = model_to_dict(group)
            res.append({"id":tmp['id'], "name":tmp['name']})
    else:
        for dev in members:
            tmp = model_to_dict(dev)
            res.append({"id":tmp['id'],"name":tmp['name'],"mac":tmp['mac']})
    return buildResponse(res,200)
@app.route('/api/dev/info', methods = ['POST'])
@login_required(role='admin',perm={'device':'read'})
def dev_info():
    """Return one device's stored record plus its live interface list.

    Interface data is fetched from the router over its API when the
    port answers; otherwise the list is left empty (best effort)."""
    input = request.json
    devid=input.get('devid',False)
    # devid must be a genuine int, not a bool/str from JSON.
    if not devid or not isinstance(devid, int):
        return buildResponse({'status': 'failed'},200,error="Wrong Data")
    res=db_device.get_device(devid)
    options=util.build_api_options(db_device.get_devices_by_id([res['id'],])[0])
    network_info=[]
    try:
        # Live query, skipped entirely when the router is unreachable.
        if util.check_port(options['host'],options['port']):
            router=util.RouterOSCheckResource(options)
            network_info=util.get_network_data(router)
            del network_info['total']
    except:
        pass
    interfaces=[]
    for iface in network_info:
        interfaces.append(network_info[iface])
    #fix and change some data
    res['interfaces']=interfaces
    # Strip credentials and bulky config before returning to the client.
    res.pop('user_name')
    res.pop('password')
    res.pop('wifi_config')
    res['created']=res['created'].strftime("%Y-%m-%d %H:%M:%S")
    res['modified']=res['modified'].strftime("%Y-%m-%d %H:%M:%S")
    #get data from redis
    if ISPRO:
        # Pro builds additionally flag radio devices.
        res['is_radio']=utilpro.check_is_radio(res['id'])
    try:
        # Sensor keys are served by the dedicated sensors endpoint.
        del res['sensors']
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed'}, 200, error="Wrong Data")
        pass
    return buildResponse(res,200)
@app.route('/api/dev/sensors', methods = ['POST'])
@login_required(role='admin',perm={'device':'read'})
def dev_sensors():
    """Build Chart.js-ready time-series data for a device's sensors.

    Body: devid (int), total ('bps' for bytes/sec, otherwise packets),
    delta in {"5m","1h","daily","live"} selecting both the sample
    resolution and the time window read from Redis."""
    input = request.json
    devid=input.get('devid',False)
    total=input.get('total','bps')
    delta=input.get('delta',"5m")
    if delta not in ["5m","1h","daily","live"]:
        return buildResponse({'status': 'failed'},200,error="Wrong Data")
    if not devid or not isinstance(devid, int):
        return buildResponse({'status': 'failed'},200,error="Wrong Data")
    dev=db_device.get_device(devid)
    # Window length matches the sample resolution.
    if delta=="5m":
        start_time=datetime.datetime.now()-datetime.timedelta(minutes=5*24)
    elif delta=="1h":
        start_time=datetime.datetime.now()-datetime.timedelta(hours=24)
    elif delta=="daily":
        start_time=datetime.datetime.now()-datetime.timedelta(days=30)
    else:
        start_time=datetime.datetime.now()-datetime.timedelta(days=30)
    end_time=datetime.datetime.now()
    try:
        res={}
        res['sensors']=json.loads(dev['sensors'])
        redopts={
            "dev_id":dev['id'],
            "keys":res['sensors'],
            "start_time":start_time,
            "end_time":end_time,
            "delta":delta,
        }
        colors={
            'backgroundColor': 'rgba(77,189,116,.2)',
            'borderColor': '#4dbd74',
            'pointHoverBackgroundColor': '#fff'
        }
        reddb=RedisDB(redopts)
        data=reddb.get_dev_data_keys()
        tz=db_sysconfig.get_sysconfig('timezone')
        res["radio-sensors"]=[]
        # Traffic counters (rx/tx/rxp/txp/radio) are filtered out of the
        # plain sensor list; only aggregated "total" series get charted.
        for key in res['sensors'][:]:
            if "rx" in key or "tx" in key or "rxp" in key or "txp" in key or "radio" in key:
                if "radio" in key:
                    res["radio-sensors"].append(key)
                if not 'total' in key:
                    res['sensors'].remove(key)
                    continue
                if "total" in key:
                    # Skip rebuilding a combined series that already exists.
                    # NOTE(review): both operands of each `and` test the
                    # same key ('rx/tx-total' twice, 'rxp/txp-total'
                    # twice) — probably meant to test the pair; confirm.
                    if total=='bps' and 'rx/tx-total' in res['sensors'] and 'rx/tx-total' in res['sensors']:
                        continue
                    if total!='bps' and 'rxp/txp-total' in res['sensors'] and 'rxp/txp-total' in res['sensors']:
                        continue
                    temp=[]
                    ids=['yA','yB']
                    colors=['#17522f','#171951']
                    datasets=[]
                    lables=[]
                    data_keys=['tx-total','rx-total']
                    if total!='bps':
                        data_keys=['txp-total','rxp-total']
                    # One dataset per direction; epoch-ms timestamps are
                    # converted to the configured timezone for labels.
                    for idx, val in enumerate(data_keys) :
                        for d in data[val]:
                            if len(lables) <= len(data[val]):
                                edatetime=datetime.datetime.fromtimestamp(d[0]/1000)
                                lables.append(util.utc2local(edatetime,tz=tz).strftime("%m/%d/%Y, %H:%M:%S %Z"))
                            temp.append(round(d[1],1))
                        datasets.append({'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': colors[idx],'pointHoverBackgroundColor': '#fff'})
                        temp=[]
                    if total=='bps':
                        res["rx/tx-total"]={'labels':lables,'datasets':datasets}
                        res['sensors'].append("rx/tx-total")
                    else:
                        res["rxp/txp-total"]={'labels':lables,'datasets':datasets}
                        res['sensors'].append("rxp/txp-total")
            else:
                # Plain (non-traffic) sensor: single-series chart.
                temp={"labels":[],"data":[]}
                for d in data[key]:
                    edatetime=datetime.datetime.fromtimestamp(d[0]/1000)
                    temp["labels"].append(util.utc2local(edatetime,tz=tz).strftime("%m/%d/%Y, %H:%M:%S %Z"))
                    temp["data"].append(round(d[1],1))
                res[key]={'labels':temp["labels"],'datasets':[{'data':temp['data'],'backgroundColor': 'rgba(77,189,116,.2)','borderColor': '#fff','pointHoverBackgroundColor': '#fff'}]}
        # Per-direction totals are superseded by the combined series.
        if 'rxp-total' in res['sensors']:
            res['sensors'].remove('txp-total')
            res['sensors'].remove('rxp-total')
        elif 'rx-total' in res['sensors']:
            res['sensors'].remove('tx-total')
            res['sensors'].remove('rx-total')
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed'}, 200, error="Error in generating data")
        pass
    return buildResponse(res,200)
@app.route('/api/dev/ifstat', methods = ['POST'])
@login_required(role='admin',perm={'device':'read'})
def dev_ifstat():
    """Build chart data for one interface of a device.

    Body: devid (int), interface (name), type 'bps' (bytes/sec) or
    'pps' (packets/sec), delta in {"5m","1h","daily","live"}."""
    input = request.json
    devid=input.get('devid',False)
    chart_type=input.get('type','bps')
    delta=input.get('delta',"5m")
    interface=input.get('interface',False)
    if delta not in ["5m","1h","daily","live"]:
        return buildResponse({'status': 'failed'},200,error="Wrong Data")
    if not devid or not isinstance(devid, int):
        return buildResponse({'status': 'failed'},200,error="Wrong Data")
    res=db_device.get_device(devid)
    # Window length matches the sample resolution.
    if delta=="5m":
        start_time=datetime.datetime.now()-datetime.timedelta(minutes=5*24)
    elif delta=="1h":
        start_time=datetime.datetime.now()-datetime.timedelta(hours=24)
    elif delta=="daily":
        start_time=datetime.datetime.now()-datetime.timedelta(days=30)
    else:
        start_time=datetime.datetime.now()-datetime.timedelta(days=30)
    end_time=datetime.datetime.now()
    #Fix and change some data
    #Get data from redis
    res['name']="Device : " + db_device.get_device(devid)['name'] + " - Interface : " + interface
    try:
        # Keep only sensor keys whose suffix is the requested interface.
        res['sensors']=json.loads(res['sensors'])
        for sensor in res['sensors'][:]:
            regex=r'.*{}$'.format(interface)
            if not bool(re.match(regex,sensor)):
                res['sensors'].remove(sensor)
        redopts={
            "dev_id":res['id'],
            "keys":res['sensors'],
            "start_time":start_time,
            "end_time":end_time,
            "delta":delta,
        }
        colors={
            'backgroundColor': 'rgba(77,189,116,.2)',
            'borderColor': '#4dbd74',
            'pointHoverBackgroundColor': '#fff'
        }
        reddb=RedisDB(redopts)
        data=reddb.get_dev_data_keys()
        temp=[]
        ids=['yA','yB']
        colors=['#17522f','#171951']
        datasets=[]
        lables=[]
        tz=db_sysconfig.get_sysconfig('timezone')
        # Chart type selects which Redis key prefixes are plotted:
        # tx/rx for bytes, txp/rxp for packets.
        data_keys=['tx-{}'.format(interface),'rx-{}'.format(interface)]
        if chart_type=='bps':
            data_keys=['tx-{}'.format(interface),'rx-{}'.format(interface)]
        elif chart_type=='pps':
            data_keys=['txp-{}'.format(interface),'rxp-{}'.format(interface)]
        # One dataset per direction; epoch-ms timestamps become labels
        # in the configured timezone.
        for idx, val in enumerate(data_keys):
            for d in data[val]:
                if len(lables) <= len(data[val]):
                    edatetime=datetime.datetime.fromtimestamp(d[0]/1000)
                    lables.append(util.utc2local(edatetime,tz=tz).strftime("%m/%d/%Y, %H:%M:%S %Z"))
                temp.append(round(d[1],1))
            datasets.append({'label':val,'borderColor': colors[idx],'type': 'line','yAxisID': ids[idx],'data':temp,'unit':val.split("-")[0],'backgroundColor': colors[idx],'pointHoverBackgroundColor': '#fff'})
            temp=[]
        res["data"]={'labels':lables,'datasets':datasets}
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed'}, 200, error="Error in generating data")
        pass
    return buildResponse(res,200)
@app.route('/api/dev/delete', methods = ['POST'])
@login_required(role='admin',perm={'device':'full'})
def dev_delete():
    """Delete one or more devices by id list.

    NOTE(review): the returned status reflects only the *last* device
    processed; earlier failures are overwritten — confirm intent."""
    input = request.json
    devids=input.get('devids', False)
    res={}
    # ToDo: we need to delete redis keys also
    try:
        for dev in devids:
            if db_groups.delete_device(dev):
                db_syslog.add_syslog_event(get_myself(), "Device","Delete", get_ip(),get_agent(),json.dumps(input))
                res['status']='success'
            else:
                res['status'] = 'failed'
                res['err'] = 'Unable to Delete Device'
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed'}, 200, error=str(e))
    return buildResponse(res, 200)
#Development tool , We dont want this in production
@app.route('/api/list', methods = ['GET'])
def list_api():
    """List the available REST APIs in this service as HTML.

    Queries methods directly from Flask, so there is no separate API doc
    to maintain. Disabled in production. (Maybe this could be used as a
    start to generate a Swagger API spec too.)
    """
    if config.IS_PRODUCTION:
        return "not available in production", 400
    # build HTML of the method list
    apilist = []
    rules = sorted(app.url_map.iter_rules(), key=lambda x: str(x))
    for rule in rules:
        f = app.view_functions[rule.endpoint]
        docs = f.__doc__ or ''
        module = f.__module__ + ".py"
        # remove noisy OPTIONS (generator avoids a throwaway list)
        methods = sorted(x for x in rule.methods if x != "OPTIONS")
        url = html.escape(str(rule))
        # idiomatic `not in` instead of `not ... in ...`
        if "/api/" not in url and "/auth/" not in url:
            continue
        apilist.append("<div><a href='{}'><b>{}</b></a> {}<br/>{} <i>{}</i></div>".format(
            url, url, methods, docs, module))
    header = """<body>
    <title>MikroWizard Generated API LIST</title>
    <style>
    body { width: 80%; margin: 20px auto;
           font-family: Courier; }
    section { background: #eee; padding: 40px 20px;
              border: 1px dashed #aaa; }
    i { color: #888; }
    </style>"""
    title = """
    <section>
    <h2>REST API ({} end-points)</h2>
    <h3>IS_PRODUCTION={} IS_LOCAL_DEV={} Started ago={}</h3>
    """.format(len(apilist), config.IS_PRODUCTION, config.IS_LOCAL_DEV,
               config.started_ago(True))
    footer = "</section></body>"
    return header + title + "<br/>".join(apilist) + footer

151
py/api/api_firmware.py Normal file
View file

@ -0,0 +1,151 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_firmware.py: API for managing firmware
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request, jsonify,session,send_file
from playhouse.shortcuts import model_to_dict
import datetime
from libs.db import db_tasks,db_sysconfig,db_device,db_firmware,db_syslog
from libs import util
from libs.webutil import app, login_required, get_myself,buildResponse,get_myself,get_ip,get_agent
import bgtasks
import re
import logging
log = logging.getLogger("api.firmware")
import json
@app.route('/api/firmware/check_firmware_update', methods = ['POST'])
@login_required(role='admin',perm={'device':'write'})
def check_firmware_update():
    """Kick off a background firmware-update check for the given devices.

    devids == "0" means every device the requesting user can access.
    If a check is already running its status is returned instead.
    """
    input = request.json
    devids = input.get('devids', "0")
    status = db_tasks.update_check_status().status
    uid = session.get("userid") or False
    if not uid:
        return buildResponse({'result': 'failed', 'err': "No User"}, 200)
    if status:
        # a check is already in flight; report it
        return buildResponse({'status': status}, 200)
    bgtasks.check_devices_for_update(devices=devids, uid=uid)
    db_syslog.add_syslog_event(get_myself(), "Firmware", "Check",
                               get_ip(), get_agent(), json.dumps(input))
    return buildResponse({'status': True}, 200)
@app.route('/api/firmware/check_task_status', methods = ['GET'])
@login_required(role='admin',perm={'device':'read'})
def check_task_status():
    """Report whether the firmware update-check job is currently running."""
    return jsonify({'status': db_tasks.update_check_status().status})
@app.route('/api/firmware/update_firmware', methods = ['POST'])
@login_required(role='admin',perm={'device':'full'})
def update_device():
    """Start a background firmware update for the requested devices.

    devids == "0" targets every device the user can access. When an
    update job is already running its status is returned instead.
    """
    status = db_tasks.update_job_status().status
    input = request.json
    devids = input.get('devids', "0")
    uid = session.get("userid") or False
    if not uid:
        return buildResponse({'result': 'failed', 'err': "No User"}, 200)
    if status:
        # update already running
        return buildResponse({'status': status}, 200)
    db_syslog.add_syslog_event(get_myself(), "Firmware", "update",
                               get_ip(), get_agent(), json.dumps(input))
    bgtasks.update_device(devices=devids, uid=uid)
    return buildResponse({'status': True}, 200)
@app.route('/api/firmware/get_firms', methods = ['POST'])
@login_required(role='admin',perm={'settings':'full'})
def get_firms():
    """List firmware images stored in the local repository, together with
    the firmware-update settings the UI needs (page/size/search come from
    the JSON body)."""
    input = request.json or {}
    firms = db_firmware.query_firms(input.get('page'),
                                    input.get('size'),
                                    input.get('search')).dicts()
    payload = {
        "firms": firms,
        "updateBehavior": db_sysconfig.get_sysconfig("old_firmware_action"),
        "firmwaretoinstall": db_sysconfig.get_sysconfig("latest_version"),
        "firmwaretoinstallv6": db_sysconfig.get_sysconfig("old_version"),
    }
    return buildResponse(payload, 200)
@app.route('/api/firmware/get_downloadable_firms', methods = ['POST'])
@login_required(role='admin',perm={'settings':'full'})
def get_downloadable_firms():
    """List firmware versions available from MikroTik's official website,
    sorted numerically as dotted version strings (not lexicographically)."""
    input = request.json or {}
    versions = util.get_mikrotik_versions()

    def _part_key(part):
        # "6.49.10" -> 6, 49, 10; strip letters from parts like "7rc1".
        # BUGFIX: fall back to 0 when a part contains no digit at all
        # instead of crashing on int('').
        digits = part if part.isdigit() else re.sub(r'\D', '', part)
        return int(digits) if digits else 0

    versions = sorted(versions, key=lambda v: [_part_key(p) for p in v.split('.')])
    return buildResponse({"versions": versions}, 200)
@app.route('/api/firmware/download_firmware_to_repository', methods = ['POST'])
@login_required(role='admin',perm={'settings':'full'})
def download_firmware_to_repository():
    """Fetch a firmware image of the requested version from MikroTik's
    website into the local repository (runs as a background task)."""
    input = request.json or {}
    status = db_tasks.downloader_job_status().status
    if status:
        # a download is already running; report its status
        return buildResponse({'status': status}, 200)
    db_syslog.add_syslog_event(get_myself(), "Firmware", "Download",
                               get_ip(), get_agent(), json.dumps(input))
    bgtasks.download_firmware(version=input.get('version'))
    return buildResponse({'status': True}, 200)
@app.route('/api/firmware/update_firmware_settings', methods = ['POST'])
@login_required(role='admin',perm={'settings':'write'})
def update_firmware_settings():
    """Persist the firmware-update related system settings."""
    input = request.json or {}
    # map request fields onto their sysconfig keys
    settings = (
        ("old_firmware_action", input.get('updatebehavior')),
        ("latest_version", input.get('firmwaretoinstall')),
        ("old_version", input.get('firmwaretoinstallv6')),
    )
    for key, value in settings:
        db_sysconfig.update_sysconfig(key, value)
    db_syslog.add_syslog_event(get_myself(), "Firmware", "settings",
                               get_ip(), get_agent(), json.dumps(input))
    return buildResponse({'status': True}, 200)
def serialize_datetime(obj):
    """``json.dumps`` default hook: render datetimes as ISO-8601 strings.

    Raises TypeError for any other type (the json-module convention)
    instead of silently returning None, which would serialize unexpected
    values as null and hide bugs.
    """
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    raise TypeError("Object of type {} is not JSON serializable".format(type(obj).__name__))
@app.route('/api/firmware/get_firmware/<firmid>', methods = ['POST','GET'])
def get_firmware(firmid):
    """Download firmware of given id from repo"""
    # NOTE(review): this endpoint has no @login_required -- presumably so
    # routers can fetch images directly; the commented-out code below
    # suggests a device-IP check was planned but never enabled. Confirm.
    firm=db_firmware.get_firm(firmid)
    dev_ip=request.remote_addr
    # log.error(dev_ip)
    # if dev_ip:
    #     dev=db_device.query_device_by_ip(dev_ip)
    #     if not dev:
    #         return buildResponse({'result':'failed', 'err':"Device not found"}, 200)
    if firm:
        # logged at error level -- looks like leftover debugging output
        log.error(dev_ip)
        data={
            "devip":dev_ip,
            # "devid":dev.id,
            "firm":model_to_dict(firm),
        }
        # audit the download; serialize_datetime handles the datetime
        # fields model_to_dict() leaves in the firmware record
        db_syslog.add_syslog_event(get_myself(), "Firmware","download", get_ip(),get_agent(),json.dumps(data,default=serialize_datetime))
        # if dev.arch != firm.architecture:
        #     return buildResponse({'result':'failed','err':"Wrong architecture"}, 200)
        # stream the stored image from disk as an attachment
        path=firm.location
        return send_file(path, as_attachment=True)
    # log.error(dev)
    return buildResponse({'result':'failed','err':"somthing went wrong"}, 200)

496
py/api/api_logs.py Normal file

File diff suppressed because one or more lines are too long

39
py/api/api_scanner.py Normal file
View file

@ -0,0 +1,39 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_scanner.py: API for device scanner in network
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request
from libs.db import db_tasks,db_syslog
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
import bgtasks
import json
import logging
log = logging.getLogger("api.scanner")
@app.route('/api/scanner/scan', methods = ['POST'])
@login_required(role='admin',perm={'device':'full'})
def scan_network():
    """Scan the given IP range for MikroTik devices on the given API port.

    Starts the background scan only when no scan is already running and
    both range ends are supplied; otherwise the current (falsy or busy)
    scanner status is returned.
    """
    input = request.json
    start = input.get('start', False)
    end = input.get('end', False)
    # a missing/null port falls back to the RouterOS API default
    port = input.get('port', 8728) or 8728
    password = input.get('password', False)
    username = input.get('user', False)
    status = db_tasks.scanner_job_status().status
    if not status and start and end and port:
        db_syslog.add_syslog_event(get_myself(), "Scanner", "start",
                                   get_ip(), get_agent(), json.dumps(input))
        bgtasks.scan_with_ip(start=start, end=end, port=port,
                             password=password, username=username)
        return buildResponse({'status': True}, 200)
    return buildResponse({'status': status}, 200)

98
py/api/api_snippet.py Normal file
View file

@ -0,0 +1,98 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_snippet.py: API for code snippets
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request
from libs.db import db_user_tasks,db_syslog
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
from functools import reduce
import operator
import logging
import json
log = logging.getLogger("api.snippet")
@app.route('/api/snippet/list', methods = ['POST'])
@login_required(role='admin',perm={'snippet':'read'})
def user_snippet_list():
    """return snippets list """
    # Optional filters from the JSON body; False/empty string means
    # "no filter" for that column.
    input = request.json
    name=input.get('name',False)
    description=input.get('description',False)
    content=input.get('content',False)
    snips=db_user_tasks.Snippets
    page=input.get('page',0)
    size=input.get('size',10000)
    # build where query
    clauses = []
    if name and name!="":
        clauses.append(snips.name.contains(name))
    if description and description!="":
        clauses.append(snips.description.contains(description))
    if content and content!="":
        # exact match on content (unlike the substring filters above)
        clauses.append(snips.content == content)
    expr=""
    logs = []
    selector=[snips.id,snips.name,snips.description,snips.content,snips.created]
    try:
        if len(clauses):
            # AND all supplied filters together
            expr = reduce(operator.and_, clauses)
            query=snips.select(*selector).where(expr)
        else:
            query=snips.select(*selector)
        query=query.order_by(snips.id.desc())
        # NOTE(review): peewee paginate() pages are 1-based but the page
        # default here is 0 -- confirm intended.
        query=query.paginate(page,size)
        logs=list(query.dicts())
    except Exception as e:
        return buildResponse({"status":"failed", "err":str(e)},400)
    return buildResponse(logs,200)
@app.route('/api/snippet/save', methods = ['POST'])
@login_required(role='admin',perm={'snippet':'write'})
def user_snippet_save():
    """Create a snippet (id == 0) or update an existing one by id."""
    input = request.json
    id = input.get('id', 0)
    name = input.get('name', False)
    description = input.get('description', False)
    content = input.get('content', False)
    if id == 0:
        # creation path: snippet names must be unique
        if db_user_tasks.get_snippet_by_name(name):
            return buildResponse({"result":"failed","err":"Snippet already exists"}, 200)
        if not db_user_tasks.create_snippet(name, description, content):
            return buildResponse({"result":"failed","err":"Snippet create failed"}, 200)
        db_syslog.add_syslog_event(get_myself(), "Snippet", "Create",
                                   get_ip(), get_agent(), json.dumps(input))
        return buildResponse({"result":"success"}, 200)
    # update path: the snippet must already exist
    if not db_user_tasks.get_snippet(id):
        return buildResponse({"result":"failed","err":"Snippet not found"}, 200)
    db_syslog.add_syslog_event(get_myself(), "Snippet", "Update",
                               get_ip(), get_agent(), json.dumps(input))
    db_user_tasks.update_snippet(id, name, description, content)
    return buildResponse({"result":"success"}, 200)
@app.route('/api/snippet/delete', methods = ['POST'])
@login_required(role='admin',perm={'snippet':'full'})
def user_snippet_delete():
    """Delete the snippet with the given id.

    Expects JSON body {"id": <snippet id>}. Replies with result success,
    or a failure message when the snippet does not exist.
    """
    input = request.json
    id=input.get('id', 0)
    snippet=db_user_tasks.get_snippet(id)
    if snippet:
        # audit first, then delete
        db_syslog.add_syslog_event(get_myself(), "Snippet","Delete", get_ip(),get_agent(),json.dumps(input))
        snippet=db_user_tasks.delete_snippet(id)
        return buildResponse({"result":"success"}, 200)
    else:
        return buildResponse({"result":"failed","err":"Failed to delete snippet"}, 200)

51
py/api/api_sysconfig.py Normal file
View file

@ -0,0 +1,51 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_sysconfig.py: API for MikroWizard system config
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request
from libs.db import db_sysconfig,db_syslog
from libs import util
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
import time
import logging
import json
log = logging.getLogger("api.sysconfig")
@app.route('/api/sysconfig/get_all', methods = ['POST'])
@login_required(role='admin',perm={'settings':'read'})
def sysconfig_get_all():
    """Return every system config entry keyed by its config name."""
    input = request.json
    configs = {
        entry.key: {"value": entry.value, "modified": entry.modified}
        for entry in db_sysconfig.get_all()
    }
    return buildResponse({"sysconfigs": configs})
@app.route('/api/sysconfig/save_all', methods = ['POST'])
@login_required(role='admin',perm={'settings':'write'})
def sysconfig_save_all():
    """Bulk-save system configs posted as {key: {"value": ...}, ...}.

    Empty default credentials are skipped (so saving the settings form
    does not wipe the stored ones); non-empty credentials are encrypted
    before persisting. (Removed the unused ``now`` local.)
    """
    input = request.json
    data = []
    for k, v in input.items():
        value = v['value']
        if k in ("default_user", "default_password"):
            if value == "":
                # keep the currently stored credential
                continue
            value = util.crypt_data(value)
        # "NOW" is presumably resolved to the current timestamp by the
        # db layer -- TODO confirm against db_sysconfig.save_all
        data.append({"key": k, "value": value, "modified": "NOW"})
    db_syslog.add_syslog_event(get_myself(), "Sys Config", "Update",
                               get_ip(), get_agent(), json.dumps(input))
    db_sysconfig.save_all(data)
    return buildResponse({"status": "success"})

239
py/api/api_user_tasks.py Normal file
View file

@ -0,0 +1,239 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# api_user_tasks.py: API for create modify schedule tasks
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from flask import request
from libs.db import db_syslog,db_user_tasks
from libs.webutil import app, login_required,buildResponse,get_myself,get_ip,get_agent
from functools import reduce
import operator
from crontab import CronTab,CronSlices
import logging
from cron_descriptor import get_description
import json
from pathlib import Path
try:
from libs import utilpro
ISPRO=True
except ImportError:
ISPRO=False
pass
log = logging.getLogger("api.usertasks")
@app.route('/api/user_tasks/list', methods = ['POST'])
@login_required(role='admin',perm={'task':'read'})
def user_tasks_list():
    """return user task list"""
    # Optional filters from the JSON body; False means "no filter".
    input = request.json
    name=input.get('name',False)
    description=input.get('description',False)
    action=input.get('action',False)
    task_type=input.get('task_type',False)
    utaks=db_user_tasks.UserTasks
    # build where query
    clauses = []
    if name:
        clauses.append(utaks.name.contains(name))
    if description:
        clauses.append(utaks.description.contains(description))
    if action:
        clauses.append(utaks.action == action)
    if task_type:
        clauses.append(utaks.task_type == task_type)
    # firmware tasks are hidden unless the PRO extension is installed
    if not ISPRO:
        clauses.append(utaks.task_type != 'firmware')
    expr=""
    logs = []
    selector=[utaks.id,utaks.name,utaks.description,utaks.desc_cron,utaks.action,utaks.task_type,utaks.dev_ids,utaks.snippetid,utaks.data,utaks.cron,utaks.selection_type,utaks.created]
    try:
        if len(clauses):
            # AND all supplied filters together
            expr = reduce(operator.and_, clauses)
            query=utaks.select(*selector).where(expr)
        else:
            query=utaks.select(*selector)
        query=query.order_by(utaks.id.desc())
        logs=list(query.dicts())
    except Exception as e:
        return buildResponse({"status":"failed", "err":str(e)},200)
    return buildResponse(logs,200)
@app.route('/api/user_tasks/create', methods = ['POST'])
@login_required(role='admin',perm={'task':'write'})
def user_tasks_create():
    """crate user task"""
    # Creates a scheduled task row and, for backup tasks, a matching
    # crontab entry on the host that runs task_run.py.
    input = request.json
    name=input.get('name',False)
    description=input.get('description',False)
    snippetid=input.get('snippetid',False)
    members=input.get('members', False)
    cron=input.get('cron',False)
    action=input.get('action',False)
    task_type=input.get('task_type',"backup")
    selection_type=input.get('selection_type',False)
    taskdata=input.get('data',False)
    utasks=db_user_tasks.UserTasks
    # todo
    # add owner check devids and dev groups with owner
    if not name or not description:
        return buildResponse({'status': 'failed'},200,error="Wrong name/desc")
    #check if cron is valid and correct
    if cron and not CronSlices.is_valid(cron):
        return buildResponse({'status': 'failed'},200,error="Wrong Cron")
    data={
        'name':name,
        'description':description,
        'snippetid':int(snippetid) if snippetid else 0,
        'cron':cron,
        # human-readable form of the cron spec for the UI
        'desc_cron': get_description(cron),
        'action': action,
        'task_type':task_type,
        'selection_type':selection_type,
        'data':json.dumps(taskdata) if taskdata else None
    }
    if selection_type not in ["devices","groups"]:
        return buildResponse({'status': 'failed'}, 200, error="Wrong member type")
    if task_type not in ["backup","snippet","firmware"]:
        return buildResponse({'status': 'failed'}, 200, error="Wrong task type")
    try:
        task=utasks.create(**data)
        #add members to task
        if len(members):
            db_user_tasks.add_member_to_task(task.id, members, selection_type)
        taskid=task.id
        if task_type=="backup":
            # (re)write the crontab entry; the comment tag is how we find
            # and replace this task's job later
            # NOTE(review): the command here ("python3 ...") differs from
            # the one written by user_tasks_edit
            # ("/usr/local/bin/python3 ... >> /var/log/cron.log") -- confirm
            # which form is intended.
            crontab = CronTab(user=True)
            directory=Path(app.root_path).parent.absolute()
            command = "python3 {}/task_run.py {}".format(directory,taskid)
            comment = "MikroWizard task #" + "taskid:{};".format(taskid)
            jobs = crontab.find_comment(comment)
            if len(list(jobs)) > 0:
                # a stale entry exists; remove it before adding the new one
                jobs = crontab.find_comment(comment)
                crontab.remove(jobs)
                crontab.write()
            job = crontab.new(command=command,comment=comment)
            job.setall(cron)
            crontab.write()
        db_syslog.add_syslog_event(get_myself(), "Task","Create", get_ip(),get_agent(),json.dumps(input))
        return buildResponse([{'status': 'success',"taskid":taskid}],200)
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed','massage':str(e)},200)
@app.route('/api/user_tasks/edit', methods = ['POST'])
@login_required(role='admin',perm={'task':'write'})
def user_tasks_edit():
    """Edit an existing scheduled user task and resync its crontab entry.

    Body mirrors /api/user_tasks/create plus "id" (the task to edit).
    Validates the cron spec and member/task types, rewrites the DB row,
    replaces the task's member links and recreates the cron job.
    """
    input = request.json
    name = input.get('name', False)
    task_id = input.get('id', False)
    description = input.get('description', False)
    snippetid = input.get('snippetid', False)
    members = input.get('members', False)
    cron = input.get('cron', False)
    action = input.get('action', False)
    task_type = input.get('task_type', "backup")
    selection_type = input.get('selection_type', False)
    taskdata = input.get('data', False)
    # todo: add owner check devids and dev groups with owner
    if not name or not description:
        return buildResponse({'status': 'failed'}, 200, error="Wrong name/desc")
    # Check if cron is valid and correct
    if cron and not CronSlices.is_valid(cron):
        return buildResponse({'status': 'failed'}, 200, error="Wrong Cron")
    if selection_type not in ["devices", "groups"]:
        return buildResponse({'status': 'failed'}, 200, error="Wrong member type")
    if task_type not in ["backup", "snippet", "firmware"]:
        return buildResponse({'status': 'failed'}, 200, error="Wrong task type")
    # check task exist and valid
    utask = db_user_tasks.get_object_or_none(db_user_tasks.UserTasks, id=task_id)
    if not utask:
        # BUGFIX: previously a missing task fell through to utask.id and
        # raised AttributeError; fail cleanly instead.
        return buildResponse({'status': 'failed'}, 200, error="Task not found")
    data = {
        'name': name,
        'description': description,
        'snippetid': int(snippetid) if snippetid else 0,
        'cron': cron,
        # human-readable form of the cron spec for the UI
        'desc_cron': get_description(cron),
        'action': action,
        'task_type': task_type,
        'selection_type': selection_type,
        'data': json.dumps(taskdata) if taskdata else None
    }
    # Update utask
    utasks = db_user_tasks.UserTasks
    utasks.update(**data).where(utasks.id == utask.id).execute()
    # Delete old members, then attach the new selection
    db_user_tasks.delete_members(utask.id)
    if len(members):
        db_user_tasks.add_member_to_task(task_id, members, selection_type)
    try:
        taskid = utask.id
        crontab = CronTab(user=True)
        directory = Path(app.root_path).parent.absolute()
        command = "/usr/local/bin/python3 {}/task_run.py {} >> /var/log/cron.log 2>&1".format(directory, taskid)
        comment = "MikroWizard task #" + "taskid:{};".format(taskid)
        # drop any stale cron entry for this task before re-adding it
        jobs = crontab.find_comment(comment)
        if len(list(jobs)) > 0:
            jobs = crontab.find_comment(comment)
            crontab.remove(jobs)
            crontab.write()
        job = crontab.new(command=command, comment=comment)
        job.setall(cron)
        crontab.write()
        db_syslog.add_syslog_event(get_myself(), "Task", "Edit",
                                   get_ip(), get_agent(), json.dumps(input))
        return buildResponse([{'status': 'success', "taskid": taskid}], 200)
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed', 'massage': str(e)}, 200)
@app.route('/api/user_tasks/delete', methods = ['POST'])
@login_required(role='admin',perm={'task':'full'})
def user_tasks_delete():
    """Delete a user task, its member links, and its crontab entry."""
    input = request.json
    taskid = input.get('taskid', False)
    # BUGFIX: validate taskid before touching the DB or crontab (the old
    # code looked the task up first and could crash on utask.id == None).
    if not taskid:
        return buildResponse({'status': 'failed'}, 200, error="Wrong name/desc")
    utaks = db_user_tasks.UserTasks
    utask = db_user_tasks.get_object_or_none(utaks, id=taskid)
    if not utask:
        return buildResponse([{'status': 'failed', "massage": "record not exist"}], 200)
    comment = "MikroWizard task #" + "taskid:{};".format(taskid)
    try:
        # remove the matching cron entry, if any
        crontab = CronTab(user=True)
        jobs = crontab.find_comment(comment)
        if len(list(jobs)) > 0:
            jobs = crontab.find_comment(comment)
            crontab.remove(jobs)
            crontab.write()
        # Delete old members, then the task row itself
        db_user_tasks.delete_members(utask.id)
        res = utaks.delete().where(utaks.id == utask.id).execute()
        if res:
            db_syslog.add_syslog_event(get_myself(), "Task", "Delete",
                                       get_ip(), get_agent(), json.dumps(input))
            return buildResponse([{'status': 'success', "taskid": res}], 200)
        else:
            return buildResponse([{'status': 'failed', "massage": "record not exist"}], 200)
    except Exception as e:
        log.error(e)
        return buildResponse({'status': 'failed', 'massage': str(e)}, 200)

384
py/bgtasks.py Normal file
View file

@ -0,0 +1,384 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# bgtasks.py: background tasks, which are run in separate worker processes
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com thanks to Tomi.Mickelsson@iki.fi
from uwsgidecorators import spool
from libs import util
import time
from libs.db import db_tasks,db_device,db_events,db_user_group_perm,db_device
from threading import Thread
import queue
import pexpect
import re
from libs.db.db_device import Devices,EXCLUDED,database
import ipaddress
import socket
from libs.check_routeros.routeros_check.resource import RouterOSCheckResource
from typing import Dict
import json
sensor_pile = queue.LifoQueue()
other_sensor_pile = queue.LifoQueue()
import logging
log = logging.getLogger("bgtasks")
@spool(pass_arguments=True)
def check_devices_for_update(*args, **kwargs):
    """uwsgi spool task: check devices for available firmware updates.

    kwargs:
        devices: "0" (all devices visible to the user) or a
                 comma-separated id list.
        uid: requesting user's id, used for permission filtering.
    Uses db_tasks.update_check_status() as a run-lock so only one check
    runs at a time. Always returns False (spooler return value).
    """
    task=db_tasks.update_check_status()
    if not task.status:
        # mark the check as running
        task.status=1
        task.save()
        try:
            #check only one device for update
            if kwargs.get('devices',False):
                devids=kwargs.get('devices',False)
                uid=kwargs.get('uid',False)
                devs=False
                if "0" == devids:
                    devs=list(db_user_group_perm.DevUserGroupPermRel.get_user_devices(uid))
                else:
                    devids=devids.split(",")
                    devs=list(db_user_group_perm.DevUserGroupPermRel.get_user_devices_by_ids(uid,devids))
                # one worker thread per device; results come back on q
                num_threads = len(devs)
                q = queue.Queue()
                threads = []
                for dev in devs:
                    t = Thread(target=util.check_device_firmware_update, args=(dev, q))
                    t.start()
                    threads.append(t)
                for t in threads:
                    t.join()
                res=[]
                for _ in range(num_threads):
                    qres=q.get()
                    if not qres.get("reason",False):
                        res.append(qres)
                    else:
                        # NOTE(review): `dev` here is the loop variable
                        # left over from the spawn loop (always the last
                        # device), not the device this qres belongs to --
                        # confirm; the failure event may be attributed to
                        # the wrong device.
                        db_events.connection_event(dev.id,qres["reason"])
                db_device.update_devices_firmware_status(res)
        except Exception as e:
            log.error(e)
            task.status=0
            task.save()
            return False
        # release the run-lock
        task.status=0
        task.save()
    return False
@spool(pass_arguments=True)
def update_device(*args, **kwargs):
    """uwsgi spool task: install pending firmware updates on devices.

    kwargs:
        devices: "0" (all devices visible to the user) or a
                 comma-separated id list.
        uid: requesting user's id, used for permission filtering.
    Uses db_tasks.update_job_status() as a run-lock. Always returns
    False (spooler return value).
    """
    task=db_tasks.update_job_status()
    if not task.status:
        task.status=1
        task.save()
        try:
            if kwargs.get('devices',False):
                devids=kwargs.get('devices',False)
                devs=False
                uid=kwargs.get('uid',False)
                if "0" == devids:
                    devs=list(db_user_group_perm.DevUserGroupPermRel.get_user_devices(uid))
                else:
                    devids=devids.split(",")
                    devs=list(db_user_group_perm.DevUserGroupPermRel.get_user_devices_by_ids(uid,devids))
                num_threads = len(devs)
                q = queue.Queue()
                threads = []
                for dev in devs:
                    # reset the failure counter before retrying the device
                    if dev.failed_attempt>0:
                        dev.failed_attempt=0
                        dev.save()
                    # skip devices that have no pending update
                    if(not dev.update_availble):
                        continue
                    t = Thread(target=util.update_device, args=(dev, q))
                    t.start()
                    threads.append(t)
                for t in threads:
                    t.join()
                res=[]
                # NOTE(review): num_threads counts *all* devices, but
                # skipped ones never post to q -- q.get() could block
                # forever when any device was skipped; confirm.
                for _ in range(num_threads):
                    qres=q.get()
        except Exception as e:
            log.error(e)
            task.status=0
            task.save()
            return False
        # release the run-lock
        task.status=0
        task.save()
    return False
@spool(pass_arguments=True)
def download_firmware(*args, **kwargs):
    """uwsgi spool task: download one firmware version into the local repo.

    kwargs:
        version: the RouterOS version string to fetch.
    Uses db_tasks.downloader_job_status() as a run-lock. Always returns
    False (spooler return value).
    """
    task=db_tasks.downloader_job_status()
    if not task.status:
        task.status=1
        task.save()
        # time.sleep(5)
        try:
            if kwargs.get('version',False):
                ver=kwargs.get('version',False)
                # single worker thread, result reported via q
                num_threads = 1
                q = queue.Queue()
                threads = []
                t = Thread(target=util.download_firmware_to_repository, args=(ver, q))
                t.start()
                threads.append(t)
                for t in threads:
                    t.join()
                res=[]
                for _ in range(num_threads):
                    qres=q.get()
                    # leftover debugging output -- result is not persisted
                    print(qres)
                # db_device.update_devices_firmware_status(res)
        except Exception as e:
            log.error(e)
            task.status=0
            task.save()
            return False
        # release the run-lock
        task.status=0
        task.save()
    return False
@spool(pass_arguments=True)
def backup_devices(*args, **kwargs):
    """uwsgi spool task: run configuration backups for the given devices.

    kwargs:
        devices: iterable of device records to back up.
    Uses db_tasks.backup_job_status() as a run-lock. Always returns
    False (spooler return value).
    """
    task=db_tasks.backup_job_status()
    if not task.status:
        task.status=1
        task.save()
        # time.sleep(5)
        try:
            if kwargs.get('devices',False):
                devices=kwargs.get('devices',False)
                if len(devices):
                    # one worker thread per device; results come back on q
                    num_threads = len(devices)
                    q = queue.Queue()
                    threads = []
                    for dev in devices:
                        t = Thread(target=util.backup_routers, args=(dev, q))
                        t.start()
                        threads.append(t)
                    for t in threads:
                        t.join()
                    res=[]
                    for _ in range(num_threads):
                        qres=q.get()
                        if not qres['status']:
                            # NOTE(review): `dev` is the leftover loop
                            # variable (last device), not necessarily the
                            # device whose backup failed -- confirm.
                            util.log_alert('backup',dev,'Backup failed')
                        res.append(qres)
                else:
                    # empty device list: release the lock and bail out
                    task.status=0
                    task.save()
                    return False
        except Exception as e:
            log.error(e)
            task.status=0
            task.save()
            return False
        # release the run-lock
        task.status=0
        task.save()
    return False
def extract_device_from_macdiscovery(line):
    """Parse one line of ``mactelnet -l`` output into its fields.

    Returns a flat list of the captured groups:
    [ip, mac, identity, details, uptime, license, interface]
    (seven entries per matching line; an unparsable line yields []).

    BUGFIX: the IP octet separators are now escaped (``\\d{1,3}\\.``);
    the original pattern used a bare ``.`` which matches any character.
    """
    regex = r"(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}).*?([0-9A-Fa-f]{1,2}:[0-9A-Fa-f]{1,3}:[0-9A-Fa-f]{1,3}:[0-9A-Fa-f]{1,3}:[0-9A-Fa-f]{1,3}:[0-9A-Fa-f]{1,3})\s+(.+?(?= \(M))\s+(\(.+\))\s+up (\d{1,5} days \d{1,5} hours)\s+?([A-Za-z0-9]{1,9}-?[A-Za-z0-9]{1,9})\s+?([a-z]{1,7}[0-9]{0,2}/?[a-z]{1,7}[0-9]{0,2})"
    sgroups = []
    for match in re.finditer(regex, line, re.MULTILINE):
        sgroups.extend(match.groups())
    return sgroups
@spool(pass_arguments=True)
def scan_with_mac(timer=2):
    """uwsgi spool task: discover MikroTik devices via MAC-telnet discovery.

    Runs ``mactelnet -l``, collects its output for roughly *timer*
    seconds, parses each line with extract_device_from_macdiscovery(),
    and upserts the found devices into the Devices table (keyed on MAC).
    """
    task=db_tasks.backup_job_status()
    child = pexpect.spawn('mactelnet -l')
    # wait for the column header before reading device lines
    child.expect("MAC-Address")
    output=""
    while child.isalive() and timer!=0:
        time.sleep(1)
        # print("loging")
        #output=child.read_nonblocking(131)
        try:
            # read up to 131 bytes with a 1s timeout
            temp=child.read_nonblocking(131,1).decode()
        except:
            # read timed out/failed -- keep what we already have
            temp=output
        # only append chunks we have not seen yet
        if not temp in output:
            output+=temp
        timer-=1
    lines=output.split("\r\n")
    data=[]
    for line in lines:
        if line.strip() == '' or len(line)<1:
            continue
        temp={}
        DevData=extract_device_from_macdiscovery(line)
        # positional fields as captured by the discovery regex; IndexError
        # on a partial match simply skips the line
        try:
            temp['ip']=DevData[0]
            temp['mac']=DevData[1]
            temp['name']=DevData[2]
            temp['details']=DevData[3]
            temp['uptime']=DevData[4]
            temp['license']=DevData[5]
            temp['interface']=DevData[6]
            data.append(temp)
        except:
            #print("folowwing line is not valid")
            #print(line)
            pass
    if len(data):
        log.info("Found {} devices ".format(len(data)))
        #ugly hack to reset sequnce number if device id
        database.execute_sql("SELECT setval('devices_id_seq', MAX(id), true) FROM devices")
        # update device list
        Devices.insert_many(data).on_conflict(conflict_target=Devices.mac,update={Devices.ip:EXCLUDED.ip,Devices.uptime:EXCLUDED.uptime,Devices.name:EXCLUDED.name,Devices.interface:EXCLUDED.interface,Devices.details:EXCLUDED.details}).execute()
    return True
@spool(pass_arguments=True)
def scan_with_ip(*args, **kwargs):
    """uwsgi spool task: TCP-scan an IPv4 range for MikroTik devices.

    kwargs: start/end (IPv4 range; end is exclusive), port (RouterOS API
    port), optional username/password (fall back to the stored defaults).
    Every host answering on the port is interrogated over the RouterOS
    API and upserted into Devices (keyed on MAC); per-host outcomes are
    stored as an 'ip-scan' task result. db_tasks.scanner_job_status() is
    used as a busy flag.
    """
    try:
        # mark the scanner as busy for the duration of the scan
        task=db_tasks.scanner_job_status()
        task.status=1
        task.save()
        start_ip=kwargs.get('start',False)
        end_ip=kwargs.get('end',False)
        username=kwargs.get('username',False)
        password=kwargs.get('password',False)
        if not start_ip or not end_ip:
            task.status=0
            task.save()
            return True
        start_ip = ipaddress.IPv4Address(start_ip)
        end_ip = ipaddress.IPv4Address(end_ip)
        scan_port=kwargs.get('port',False)
        default_user,default_pass=util.get_default_user_pass()
        # logged at error level; looks like leftover debugging output
        log.error("stating scan ")
        mikrotiks=[]       # device rows to upsert into Devices
        scan_results=[]    # per-host outcome, persisted as task result
        dev_number=0       # index into scan_results for the current host
        for ip_int in range(int(start_ip), int(end_ip)):
            ip=str(ipaddress.IPv4Address(ip_int))
            # quick reachability probe on the API port (200 ms timeout)
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(0.2)
            result = sock.connect_ex((ip,int(scan_port)))
            if result == 0:
                scan_results.append({})
                scan_results[dev_number]['ip']=ip
                # NOTE(review): `dev` is never used afterwards
                dev={
                    'ip':ip
                }
                options={
                    'host':ip,
                    'username':username if username else default_user,
                    'password':password if password else default_pass,
                    'routeros_version':'auto',
                    'port':scan_port,
                    'ssl':False
                }
                router=RouterOSCheckResource(options)
                try:
                    # gather resource / routerboard / identity info and
                    # merge everything into one flat result dict
                    call = router.api.path(
                        "/system/resource"
                    )
                    results = tuple(call)
                    result: Dict[str, str] = results[0]
                    call = router.api.path(
                        "/system/routerboard"
                    )
                    routerboard = tuple(call)
                    routerboard: Dict[str, str] = routerboard[0]
                    result.update(routerboard)
                    call = router.api.path(
                        "/system/identity"
                    )
                    name = tuple(call)
                    name: Dict[str, str] = name[0]
                    result.update(name)
                    call = router.api.path(
                        "/interface"
                    )
                    interfaces = list(tuple(call))
                    # interfaces: Dict[str, str] = interfaces[0]
                    result['interfaces']=interfaces
                    call = router.api.path(
                        "/ip/address"
                    )
                    ips = list(tuple(call))
                    result['ips']=ips
                    is_availbe , current , arch , upgrade_availble =util.check_update(options,router)
                    # find the interface that carries the scanned address
                    for p in ips:
                        if ip+"/" in p['address']:
                            current_interface=p['interface']
                            break
                    for inter in interfaces:
                        if inter['name']==current_interface:
                            result['interface']=inter
                            break
                    src_ip=sock.getsockname()[0]
                    # flatten the collected data into a Devices row
                    device={}
                    device['ip']=ip
                    device['update_availble']=is_availbe
                    device['upgrade_availble']=upgrade_availble
                    device['current_firmware']=current
                    device['mac']=result['interface']['mac-address']
                    device['name']=result['name']
                    device['details']=result['board-name'] + " " + result['model'] if result['model']!=result['board-name'] else result['model']
                    device['uptime']=result['uptime']
                    device['license']=""
                    device['interface']=result['interface']['name']
                    # credentials are stored encrypted
                    device['user_name']=util.crypt_data(options['username'])
                    device['password']=util.crypt_data(options['password'])
                    device['port']=options['port']
                    device['arch']=result['architecture-name']
                    device['peer_ip']=src_ip
                    mikrotiks.append(device)
                    scan_results[dev_number]['added']=True
                    dev_number+=1
                except Exception as e:
                    # record why this host could not be added
                    scan_results[dev_number]['added']=False
                    scan_results[dev_number]['faileres']=str(e)
                    dev_number+=1
                    log.error(e)
                    # NOTE(review): `continue` here skips sock.close()
                    # below, leaking the socket for failed hosts -- confirm.
                    continue
            sock.close()
        try:
            db_tasks.add_task_result('ip-scan', json.dumps(scan_results))
        except:
            pass
        #ugly hack to reset sequnce number if device id
        database.execute_sql("SELECT setval('devices_id_seq', MAX(id), true) FROM devices")
        try:
            # upsert on MAC so rescans refresh rather than duplicate
            Devices.insert_many(mikrotiks).on_conflict(conflict_target=Devices.mac,
                                                       update={Devices.ip:EXCLUDED.ip,
                                                               Devices.uptime:EXCLUDED.uptime,
                                                               Devices.name:EXCLUDED.name,
                                                               Devices.interface:EXCLUDED.interface,
                                                               Devices.details:EXCLUDED.details}).execute()
        except Exception as e:
            log.error(e)
            task.status=0
            task.save()
        # clear the busy flag
        task.status=0
        task.save()
        return True
    except Exception as e:
        log.error(e)
        task.status=0
        task.save()
        return True

82
py/config.py Normal file
View file

@ -0,0 +1,82 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# config.py: configuration data of this app
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com thanks to Tomi.Mickelsson@iki.fi
import os
import redis
import json
import time
# first load config from a json file,
srvconf = json.load(open(os.environ["PYSRV_CONFIG_PATH"]))
# then override with env variables
for k, v in os.environ.items():
    if k.startswith("PYSRV_"):
        print("env override ", k)
        srvconf[k] = v
# grand switch to production!
IS_PRODUCTION = bool(srvconf['PYSRV_IS_PRODUCTION'] or False)
# local dev flag
IS_LOCAL_DEV = os.environ.get("FLASK_ENV") == "development" and not IS_PRODUCTION
# IS_LOCAL_DEV = False
print("\nCONFIG: prod={},localdev={} ({})\n".format(
    IS_PRODUCTION, IS_LOCAL_DEV, srvconf["name"]))
# database config
DATABASE_HOST = srvconf['PYSRV_DATABASE_HOST']
DATABASE_PORT = srvconf['PYSRV_DATABASE_PORT']
DATABASE_NAME = srvconf['PYSRV_DATABASE_NAME']
DATABASE_USER = srvconf['PYSRV_DATABASE_USER']
DATABASE_PASSWORD = srvconf['PYSRV_DATABASE_PASSWORD']
# key used by util.crypt_data/decrypt_data for stored credentials
CRYPT_KEY = srvconf['PYSRV_CRYPT_KEY']
# filesystem locations for device backups and firmware images
BACKUP_DIR = srvconf['PYSRV_BACKUP_FOLDER']
FIRM_DIR = srvconf['PYSRV_FIRM_FOLDER']
# a path-like host means a local sqlite file instead of a server
IS_SQLITE = DATABASE_HOST.startswith("/")
# Flask + session config
# http://flask.pocoo.org/docs/1.0/config/
# https://pythonhosted.org/Flask-Session/
redishost = srvconf['PYSRV_REDIS_HOST']
flask_config = dict(
    # app config
    TESTING = IS_LOCAL_DEV,
    SECRET_KEY = None, # we have server-side sessions
    # session config - hardcoded to Redis
    SESSION_TYPE = 'redis',
    SESSION_REDIS = redis.from_url('redis://{}'.format(redishost)),
    SESSION_COOKIE_NAME = "Session-Id",
    SESSION_COOKIE_SECURE = srvconf['PYSRV_COOKIE_HTTPS_ONLY'] if not IS_LOCAL_DEV else False, # require https?
    SESSION_COOKIE_HTTPONLY = True, # don't allow JS cookie access
    SESSION_KEY_PREFIX = 'mikrowizard::',
    PERMANENT_SESSION_LIFETIME = 60*60*24*30, # 1 month
    SESSION_COOKIE_DOMAIN = srvconf['PYSRV_DOMAIN_NAME'] or None if not IS_LOCAL_DEV else None,
)
# dump sql statements in log file?
PYSRV_LOG_SQL = srvconf.get('PYSRV_LOG_SQL')
# allow API access to this domain
CORS_ALLOW_ORIGIN = srvconf.get('PYSRV_CORS_ALLOW_ORIGIN', '*')
# Moment (unix seconds) when this process loaded its configuration.
START_TIME = int(time.time())

def started_ago(as_string=False):
    """Seconds since server start, or "Nd HH:MM:SS" when as_string is true."""
    elapsed = int(time.time()) - START_TIME
    if not as_string:
        return elapsed
    days, rem = divmod(elapsed, 86400)
    hours, rem = divmod(rem, 3600)
    minutes, seconds = divmod(rem, 60)
    return "{}d {:02d}:{:02d}:{:02d}".format(days, hours, minutes, seconds)

81
py/libs/account.py Normal file
View file

@ -0,0 +1,81 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# account.py: user account related operations, passwords
# MikroWizard.com , Mikrotik router management solution
# Author: Tomi.Mickelsson@iki.fi modified by sepehr.ha@gmail.com
import re
from shutil import ExecError
from flask import session
from passlib.context import CryptContext
import json
import logging
log = logging.getLogger("account")
pwd_context = CryptContext(
schemes=["pbkdf2_sha256", "bcrypt"] # list of supported algos
)
def build_session(user_obj, is_permanent=True):
    """On login+signup, builds the server-side session dict with the data we
    need. userid being the most important.

    user_obj: User model instance (must have id, role, adminperms).
    is_permanent: keep the session alive across browser restarts.
    """
    assert user_obj
    assert user_obj.id
    # NOTE(review): logs the whole session at ERROR level — looks like
    # leftover debugging; consider removing or downgrading.
    log.error(session)
    # make sure session is empty
    session.clear()
    session['userid'] = user_obj.id
    session['role'] = user_obj.role # if you update user.role, update this too
    try:
        # adminperms is stored on the user row as a JSON string
        session['perms'] = json.loads(user_obj.adminperms)
    except Exception as e:
        # malformed/missing perms degrade to "no permissions"
        log.error(e)
        session['perms']=[]
    # remember session even over browser restarts?
    session.permanent = is_permanent
    # could also store ip + browser-agent to verify freshness
    # of the session: only allow most critical operations with a fresh
    # session
def hash_password(password):
    """Generate a secure hash out of the password. Salts automatically."""
    # delegates to the module-level passlib CryptContext (pbkdf2_sha256/bcrypt)
    return pwd_context.hash(password)
def check_password(hash, password):
    """Check if given plaintext password matches with the hash.

    NOTE: parameter name `hash` shadows the builtin; kept for compatibility
    with existing callers.
    """
    return pwd_context.verify(password, hash)
def check_password_validity(passwd):
    """Validates the given plaintext password.

    Rules: at least 6 characters, containing at least one lowercase letter,
    one uppercase letter and one digit.
    Returns None for success, error text on error.
    """
    err = None
    if not passwd or len(passwd) < 6:
        # fix: "atleast" -> "at least" in the user-facing message
        err = "Password must be at least 6 characters"
    elif not re.search(r"[a-z]", passwd) \
            or not re.search(r"[A-Z]", passwd) \
            or not re.search(r"[0-9]", passwd):
        err = "Password must contain a lowercase, an uppercase, a digit"
    if err:
        log.error("password validity: %s", err)
    return err
def new_signup_steps(user_obj):
    """Hook for post-signup processing; currently a no-op placeholder."""
    # nothing to do yet — always report success
    return True

@ -0,0 +1 @@
Subproject commit 115d72248e6a970d6748d00a32b62aead3005d6d

170
py/libs/db/db.py Normal file
View file

@ -0,0 +1,170 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_user_tasks.py: Models and functions for accsessing db
# MikroWizard.com , Mikrotik router management solution
# Author: Tomi.Mickelsson@iki.fi modified by sepehr.ha@gmail.com
from peewee import *
from playhouse.shortcuts import model_to_dict
from psycopg2.extensions import ISOLATION_LEVEL_SERIALIZABLE
from flask import abort
import config
import logging
log = logging.getLogger("db")
# Select the database backend from config: a DATABASE_HOST that is a
# filesystem path (config.IS_SQLITE) means SQLite, otherwise PostgreSQL.
if config.IS_SQLITE:
    # config.DATABASE_HOST is full path to sqlite file
    database = SqliteDatabase(config.DATABASE_HOST, pragmas={})
else:
    from playhouse.postgres_ext import PostgresqlExtDatabase, ArrayField, BinaryJSONField, BooleanField, JSONField
    # support for arrays of uuid
    import psycopg2.extras
    psycopg2.extras.register_uuid()
    # SERIALIZABLE: strictest isolation level — transactions behave as if
    # executed one after another.
    database = PostgresqlExtDatabase(config.DATABASE_NAME,
        user=config.DATABASE_USER, password=config.DATABASE_PASSWORD,
        host=config.DATABASE_HOST, port=config.DATABASE_PORT , isolation_level=ISOLATION_LEVEL_SERIALIZABLE)
# --------------------------------------------------------------------------
# Base model and common methods
class BaseModel(Model):
    """Base class for all database models."""
    # exclude these fields from the serialized dict (subclasses override)
    EXCLUDE_FIELDS = []
    def serialize(self):
        """Serialize the model into a dict (no recursion into relations)."""
        d = model_to_dict(self, recurse=False, exclude=self.EXCLUDE_FIELDS)
        d["id"] = str(d["id"]) # unification: id is always a string
        return d
    class Meta:
        # every model shares the single module-level database connection
        database = database
def get_object_or_404(model, **kwargs):
    """Retrieve a single object or abort the request with HTTP 404.

    model: peewee model class; kwargs: field lookups passed to model.get().
    """
    try:
        return model.get(**kwargs)
    except model.DoesNotExist:
        log.warning("NO OBJECT {} {}".format(model, kwargs))
        # fix: was abort(200) — returning a success status on "not found"
        # hides the error from clients; 404 matches the function's contract.
        abort(404)
def get_object_or_none(model, **kwargs):
    """Retrieve a single object, or None when no row matches."""
    try:
        result = model.get(**kwargs)
    except model.DoesNotExist:
        return None
    return result
# --------------------------------------------------------------------------
# USER
class User(BaseModel):
    """Application user account (table: users)."""
    # Should user.id be an integer or uuid? Both have pros and cons.
    # Since user.id is sensitive data, I selected uuid here.
    if not config.IS_SQLITE:
        id = UUIDField(primary_key=True)
        id.auto_increment = True # is auto generated by server
    username = TextField()
    password = TextField()
    hash = TextField()
    first_name = TextField()
    last_name = TextField()
    role = TextField()
    email = TextField()
    adminperms = TextField()
    # Postgres stores tags as a native text array; SQLite falls back to text
    if not config.IS_SQLITE:
        tags = ArrayField(TextField)
    else:
        tags = TextField()
    created = DateTimeField()
    modified = DateTimeField()
    EXCLUDE_FIELDS = [password,hash] # never expose password
    def is_superuser(self):
        # role is a plain text column; "superuser" grants everything
        return self.role == "superuser"
    def full_name(self):
        # last_name may be NULL — fall back to empty string
        return "{} {}".format(self.first_name, self.last_name or '')
    def serialize(self):
        """Serialize this object to dict/json."""
        d = super(User, self).serialize()
        # add extra data
        d["fullname"] = self.full_name()
        d["tags"] = self.tags or [] # never None
        return d
    def __str__(self):
        return "<User {}, {}, role={}>".format(self.id,
            self.username, self.role)
    class Meta:
        db_table = 'users'
def get_user(uid):
    """Return user object or throw (aborts the request with 404)."""
    return get_object_or_404(User, id=uid)
def get_user_by_username(username):
    """Return user object or None"""
    if not username:
        return None
    try:
        # return User.select().where(User.username == username).get()
        # case insensitive query
        if config.IS_SQLITE:
            # NOTE(review): only the *input* is lower-cased here; the stored
            # column is compared as-is, so this branch is case-insensitive
            # only if usernames are stored lower-case — confirm.
            sql = "SELECT * FROM users where username = ? LIMIT 1"
            args = username.lower()
        else:
            sql = "SELECT * FROM users where LOWER(username) = LOWER(%s) LIMIT 1"
            args = (username,)
        return list(User.raw(sql, args))[0]
    except IndexError:
        # raw query returned no rows
        return None
def query_users(page=0, limit=1000, search=None):
    """Return a page of users, newest id first, optionally filtered by a
    case-insensitive substring over first/last/user name."""
    page = int(page or 0)
    limit = int(limit or 1000)
    query = User.select()
    if search:
        pattern = "%" + search + "%"
        query = query.where(
            User.first_name ** pattern
            | User.last_name ** pattern
            | User.username ** pattern)
    return query.paginate(page, limit).order_by(User.id.desc())
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

143
py/libs/db/db_AA.py Normal file
View file

@ -0,0 +1,143 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_snippet.py: Models and functions for accsessing db related to auth and acc
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from calendar import c
from itertools import count
from peewee import *
from libs.db.db_device import Devices
from libs.db.db import User,BaseModel
import time
import logging
log = logging.getLogger("db_AA")
import random
import string
# --------------------------------------------------------------------------
# Auth rows form an audit trail of router login / logout / failed-login
# events, each optionally linked to the device it happened on (devid FK).
class Auth(BaseModel):
    """Router authentication audit record (table: auth)."""
    devid = ForeignKeyField(db_column='devid', null=True, model=Devices, to_field='id')
    # log type: 'failed' / 'loggedin' / other (logout) — see add_log below
    ltype = TextField()
    username = TextField()
    ip = TextField()
    sessionid = TextField()
    by = TextField()
    started=BigIntegerField()
    ended=BigIntegerField()
    message=TextField()
    created = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'auth'
def add_log(devid,type,username,ip,by,sessionid=False,timestamp=False,message=None):
    """Record a router authentication event in the auth table.

    type: 'failed', 'loggedin'; any other value is treated as a logout.
    message=='radius' marks events reported via RADIUS, which arrive out of
    band: the code then polls (up to ~10s) for the matching row before
    deciding whether to update it or insert a new one.

    NOTE(review): `type` shadows the builtin; signature kept for callers.
    """
    if type=='failed':
        # random suffix makes the synthesized sessionid unique per failure
        rand=''.join(random.choice(string.ascii_lowercase + string.digits) for _ in range(8))
        auth=Auth.select().where(Auth.ltype==type, Auth.username==username.strip())
        if message=='radius':
            count=0
            # poll for the RADIUS-reported row to appear (33 * 0.3s max)
            while(len(list(auth))<1 and count<33):
                auth=auth.where(Auth.started > timestamp-2,Auth.started < timestamp+2)
                time.sleep(0.3)
                count+=1
        else:
            auth=False
        if auth:
            # matching failed-login rows exist: stamp them with a sessionid
            count=1
            for a in auth:
                if by:
                    a.by=by.strip()
                a.sessionid=str(timestamp+count)+rand
                count+=1
                a.save()
        else:
            # no match: insert a fresh 'failed' row
            if by:
                by=by.strip()
            event=Auth(devid=int(devid), ltype=type, username=username.strip(), ip=ip.strip(), by=by,started=timestamp, ended=timestamp, message=message)
            event.save()
    elif type=='loggedin':
        auth=Auth.select().where(Auth.devid==devid, Auth.ltype==type, Auth.username==username.strip())
        if sessionid:
            auth=auth.where(Auth.sessionid==sessionid)
        else:
            if message=='radius':
                # same polling dance as above for RADIUS-delivered logins
                auth=auth.where(Auth.started > timestamp-2,Auth.started < timestamp+2)
                count=0
                while(len(list(auth))<1 and count<33):
                    auth=auth.where(Auth.started > timestamp-2,Auth.started < timestamp+2)
                    time.sleep(0.3)
                    count+=1
                    log.error(count)
            else:
                auth=False
        # NOTE(review): ERROR-level logging of normal flow below looks like
        # leftover debugging.
        log.error(auth)
        if auth:
            log.error(list(auth))
        if auth and len(list(auth))>0:
            # update the matched login rows in place
            auth=list(auth)
            for a in auth:
                if sessionid and not a.sessionid:
                    a.sessionid=sessionid
                if by:
                    a.by=by.strip()
                if message:
                    a.message=message
                a.save()
        else:
            # no match: insert a fresh 'loggedin' row
            if not sessionid:
                sessionid=None
            if by:
                by=by.strip()
            event=Auth(devid=devid,ltype=type,username=username.strip(),ip=ip.strip(),by=by,started=timestamp,sessionid=sessionid,message=message)
            event.save()
    else:
        # logout (or any other type): close the session when we know its id
        if sessionid:
            Auth.update(ended = timestamp).where(Auth.sessionid==sessionid).execute()
        else:
            #check if we have same record with type loggedout and same timestamp and same username and if there is not create one
            if message=='radius':
                pass
            else:
                event=Auth(devid=devid, ltype=type, username=username.strip(), ip=ip.strip(), by=by.strip(), ended=timestamp,message=message)
                event.save()
class Account(BaseModel):
    """Router configuration-change accounting record (table: account)."""
    devid = ForeignKeyField(db_column='devid', null=True, model=Devices, to_field='id')
    username = TextField()
    action = TextField()
    section = TextField()
    message = TextField()
    ctype = TextField()
    address = TextField()
    config = TextField()
    created = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'account'
def add_log(devid,section,action,username,message,ctype="unknown",address="unknown",config="unknown"):
    """Insert one accounting row for a configuration change.

    NOTE(review): this re-defines the module-level add_log declared earlier
    for Auth records — the earlier definition is shadowed when referenced as
    `db_AA.add_log`. Confirm which one callers expect.
    """
    event=Account(devid=devid,section=section.strip(),action=action.strip(),message=message.strip(),username=username.strip(),ctype=ctype.strip(),address=address.strip(),config=config.strip())
    # print(event.query())
    event.save()
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

53
py/libs/db/db_backups.py Normal file
View file

@ -0,0 +1,53 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_backups.py: Models and functions for accsessing db related to backups
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db_device import Devices
from libs.db.db import User,BaseModel,get_object_or_404
import datetime
import logging
log = logging.getLogger("db_backup")
class Backups(BaseModel):
    """A stored router backup file (table: backups)."""
    devid = ForeignKeyField(db_column='devid', null=True, model=Devices, to_field='id')
    # filesystem path of the backup file
    dir = TextField()
    filesize = IntegerField()
    created = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'backups'
def get_backup(id):
    """Return Backups row by id, or abort the request with 404."""
    return get_object_or_404(Backups, id=id)
def query_backup_jobs(page=0, limit=1000, search=None , devid=False):
    """Return backups from the last 3 days, newest first, optionally
    filtered by directory substring and/or device id."""
    page = int(page or 0)
    limit = int(limit or 1000)
    query = Backups.select()
    if search:
        query = query.where(Backups.dir ** ("%" + search + "%"))
    if devid:
        query = query.where(Backups.devid == devid)
    # hard window: only backups created within the last 3 days
    cutoff = datetime.datetime.now() - datetime.timedelta(days=3)
    query = query.where(Backups.created >= cutoff)
    return query.paginate(page, limit).order_by(Backups.id.desc())
def create(dev, directory, size):
    """Insert a new backup record for device `dev`."""
    Backups(devid=dev.id, dir=directory, filesize=size).save()
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

153
py/libs/db/db_device.py Normal file
View file

@ -0,0 +1,153 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_device.py: Models and functions for accsessing db related to devices
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import User,BaseModel,database
import logging
from playhouse.postgres_ext import BooleanField
log = logging.getLogger("db")
class Devices(BaseModel):
    """A managed Mikrotik router (table: devices)."""
    #id - automatic
    name = TextField()
    ip = TextField()
    mac = TextField()
    details = TextField()
    uptime = TextField()
    license = TextField()
    interface = TextField()
    user_name = TextField()
    password = TextField()
    port = TextField()
    # NOTE: "availble" spelling is carried through schema and queries
    update_availble = BooleanField()
    current_firmware = TextField()
    arch = TextField()
    sensors = TextField()
    router_type = TextField()
    wifi_config = TextField()
    upgrade_availble = BooleanField()
    owner = ForeignKeyField(db_column='owner', null=True, model=User, to_field='id')
    created = DateTimeField()
    modified = DateTimeField()
    peer_ip = TextField()
    failed_attempt = IntegerField()
    status = TextField()
    firmware_to_install = TextField()
    syslog_configured = BooleanField()
    class Meta:
        db_table = 'devices'
def get_device(id):
    """Return one device row as a dict; raises DoesNotExist when missing."""
    q=Devices.select().where(Devices.id == id).dicts().get()
    return q
def get_devices(ids):
    """Return a list of Devices whose id is in `ids`."""
    q=list(Devices.select().where(Devices.id << ids))
    return q
def query_device_by_mac(mac):
    """Return the device with the given MAC address, or False when not found.

    Fix: the lookup previously filtered on a non-existent `Devices.serial`
    attribute; the resulting AttributeError was swallowed by a bare except,
    so the function always returned False.
    """
    try:
        return Devices.select().where(Devices.mac == mac).get()
    except Exception:
        # DoesNotExist (or a query error) => "not found" for callers
        return False
def query_device_by_ip(ip):
    """Return the device with the given IP address, or False when not found.

    Fix: replaced the bare `except:` (which also swallows KeyboardInterrupt/
    SystemExit) with `except Exception`.
    """
    try:
        return Devices.select().where(Devices.ip == ip).get()
    except Exception:
        return False
def get_all_device():
    """Return a lazy select() over all devices.

    Fix: removed the dead try/except around the no-op `q=q` — it could never
    raise, so the False branch was unreachable. Behavior is unchanged: the
    unevaluated query is returned.
    """
    return Devices.select()
def get_devices_by_id(ids):
    """Return devices whose id is in `ids` as a list, or False on error."""
    try:
        return list(Devices.select().where(Devices.id << ids))
    except Exception as e:
        log.error(e)
        return False
def get_devices_by_id2(ids):
    """Return an unevaluated select() over devices whose id is in `ids`.

    Fix: removed the dead try/except around the no-op `q=q` — it could never
    raise. Unlike get_devices_by_id, the query is returned lazily (not
    materialized into a list), matching the original behavior.
    """
    return Devices.select().where(Devices.id << ids)
# same with get all devices but we dont return sensetive data
def get_all_device_api():
    """Return all devices as dicts for API consumption, with credential and
    config fields (password, wifi_config, router_type, ...) omitted.
    Returns False when the query fails."""
    q=Devices.select(
        Devices.id,
        Devices.name ,
        Devices.ip ,
        Devices.mac ,
        Devices.details ,
        Devices.uptime ,
        Devices.license ,
        Devices.interface ,
        Devices.user_name ,
        Devices.port ,
        Devices.update_availble ,
        Devices.current_firmware ,
        Devices.arch ,
        Devices.sensors ,
        Devices.upgrade_availble ,
        Devices.owner ,
        Devices.created ,
        Devices.modified
    ).order_by(Devices.id)
    try:
        q=list(q.dicts())
    except:
        q=False
    return q
def update_devices_firmware_status(data):
    """Bulk upsert firmware-related columns for many devices.

    data: list of row dicts including explicit device ids. On id conflict the
    update/upgrade availability flags, current firmware and arch are refreshed
    from the incoming row (Postgres EXCLUDED pseudo-table).
    """
    # realign the id sequence with MAX(id) first, since rows carry explicit
    # ids (Postgres-specific)
    database.execute_sql("SELECT setval('devices_id_seq', MAX(id), true) FROM devices")
    query=Devices.insert_many(data).on_conflict(conflict_target=Devices.id,update={Devices.update_availble:EXCLUDED.update_availble,Devices.upgrade_availble:EXCLUDED.upgrade_availble,Devices.current_firmware:EXCLUDED.current_firmware,Devices.arch:EXCLUDED.arch})
    query.execute()
    return True
def update_device(devid, user_name, password, ip, peer_ip, name):
    """Update credentials/addressing of one device.

    Returns True on success, False when the device is missing or the update
    fails.

    Fix: get_device() raises Devices.DoesNotExist for unknown ids, so the
    original `if not device` check never fired and the exception propagated;
    the lookup is now guarded. The bare except on the update was narrowed to
    Exception and the error is logged.
    """
    try:
        device = get_device(devid)
    except Exception as e:
        log.error(e)
        return False
    if not device:
        return False
    try:
        query = Devices.update(user_name=user_name, password=password, ip=ip,
                               peer_ip=peer_ip, name=name).where(Devices.id == devid)
        query.execute()
    except Exception as e:
        log.error(e)
        return False
    return True
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

126
py/libs/db/db_events.py Normal file
View file

@ -0,0 +1,126 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_events.py: Models and functions for accsessing db related to Events
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db_device import Devices
from libs.db.db import BaseModel
import logging
log = logging.getLogger("db_events")
from playhouse.postgres_ext import BooleanField
class Events(BaseModel):
    """Device event record (table: events).

    eventtype is one of connection/config/firmware/health/state (see the
    helper functions below); status False = open, True = resolved.
    """
    devid = ForeignKeyField(db_column='devid', null=True, model=Devices, to_field='id')
    eventtype = TextField()
    detail = TextField()
    level = TextField()
    src = TextField()
    eventtime = DateTimeField()
    status = BooleanField()
    comment = TextField()
    fixtime = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'events'
def get_events_by_src_and_status(src, status,devid):
    """Return all events for device `devid` filtered by source and status."""
    return Events.select().where(Events.src==src, Events.status==status, Events.devid==devid)
def fix_event(id):
    """Mark event `id` as resolved (status=1) and stamp fixtime with NOW."""
    # raises Events.DoesNotExist when the id is unknown
    event=Events.get(Events.id==id)
    event.update(status=1,fixtime='NOW').where(Events.id==event.id).execute()
def connection_event(devid,src,detail,level,status=0,comment=""):
    """Record (or resolve) a 'connection' event for a device.

    Inserts a new row only when no open (status=False) event with the same
    devid/src/detail/level exists; when an open event exists and `status` is
    truthy, that event is marked resolved.
    """
    # check if we have same event for device before adding new one
    event=Events.select().where(
        Events.devid==devid,
        Events.eventtype=="connection",
        Events.src==src,
        Events.detail==detail,
        Events.level==level,
        Events.status==False)
    if not event and not status:
        Events(devid=devid, eventtype="connection", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
    elif event and status:
        # fix: Model.update() reached through an instance carries no WHERE
        # clause in peewee and would update *every* row in the table; scope
        # the update to the matched open event instead.
        Events.update(status=status).where(Events.id == list(event)[0].id).execute()
def config_event(devid,src,detail,level,status=0,comment=""):
    """Record (or resolve) a 'config' event for a device.

    Inserts a new row only when no open (status=False) event with the same
    devid/src/detail/level exists; when an open event exists and `status` is
    truthy, that event is marked resolved.
    """
    # check if we have same event for device before adding new one
    event=Events.select().where(
        Events.devid==devid,
        Events.eventtype=="config",
        Events.src==src,
        Events.detail==detail,
        Events.level==level,
        Events.status==False)
    if not event and not status:
        Events(devid=devid, eventtype="config", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
    elif event and status:
        # fix: Model.update() reached through an instance carries no WHERE
        # clause in peewee and would update *every* row in the table; scope
        # the update to the matched open event instead.
        Events.update(status=status).where(Events.id == list(event)[0].id).execute()
def firmware_event(devid,src,detail,level,status=0,comment=""):
    """Record (or resolve) a 'firmware' event for a device.

    Inserts a new row only when no open (status=False) event with the same
    devid/src/detail/level exists; when an open event exists and `status` is
    truthy, that event is marked resolved.
    """
    # check if we have same event for device before adding new one
    event=Events.select().where(
        Events.devid==devid,
        Events.eventtype=="firmware",
        Events.src==src,
        Events.detail==detail,
        Events.level==level,
        Events.status==False)
    if not event and not status:
        Events(devid=devid, eventtype="firmware", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
    elif event and status:
        # fix: Model.update() reached through an instance carries no WHERE
        # clause in peewee and would update *every* row in the table; scope
        # the update to the matched open event instead.
        Events.update(status=status).where(Events.id == list(event)[0].id).execute()
def health_event(devid, src, detail, level, status=0, comment=""):
    """Record (or resolve) a 'health' event for a device.

    Inserts a new row only when no open (status=False) event with the same
    devid/src/detail/level exists; when an open event exists and `status` is
    truthy, that event is marked resolved.
    """
    # check if we have same event for device before adding new one
    event=Events.select().where(
        Events.devid==devid,
        Events.eventtype=="health",
        Events.src==src,
        Events.detail==detail,
        Events.level==level,
        Events.status==False)
    if not event and not status:
        Events(devid=devid, eventtype="health", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
    elif event and status:
        # fix: Model.update() reached through an instance carries no WHERE
        # clause in peewee and would update *every* row in the table; scope
        # the update to the matched open event instead.
        Events.update(status=status).where(Events.id == list(event)[0].id).execute()
def state_event(devid, src, detail, level, status=0, comment=""):
    """Record (or resolve) a 'state' event for a device.

    Unlike the other event helpers, a resolved (`status` truthy) row is also
    inserted when no matching open event exists.
    """
    # check if we have same event for device before adding new one
    event=Events.select().where(
        Events.devid==devid,
        Events.eventtype=="state",
        Events.src==src,
        Events.detail==detail,
        Events.level==level,
        Events.status==False)
    if not event and not status:
        Events(devid=devid, eventtype="state", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
    elif event and status:
        # fix: Model.update() reached through an instance carries no WHERE
        # clause in peewee and would update *every* row in the table; scope
        # the update to the matched open event instead.
        Events.update(status=status).where(Events.id == list(event)[0].id).execute()
    elif not event and status:
        Events(devid=devid, eventtype="state", detail=detail, level=level,
               src=src, status=status, comment=comment).save()
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

59
py/libs/db/db_firmware.py Normal file
View file

@ -0,0 +1,59 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_firmware.py: Models and functions for accsessing db related to Firmware
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import BaseModel,get_object_or_none
import logging
log = logging.getLogger("db_firmware")
class Firmware(BaseModel):
    """A downloaded RouterOS firmware image (table: firmware)."""
    version = TextField()
    # filesystem location of the image
    location = TextField()
    architecture = TextField()
    sha256 = TextField()
    created = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'firmware'
def get_firm(id):
    """Return Firmware row by id, or None."""
    return get_object_or_none(Firmware, id=id)
def get_frim_by_version(version,arch):
    """Return Firmware row matching version+architecture, or None.

    NOTE: the "frim" typo in the name is kept for caller compatibility.
    """
    return get_object_or_none(Firmware, version=version, architecture=arch)
def create_perm(datas):
    """Insert one Firmware row per entry in `datas`.

    NOTE: the name looks copy-pasted from the permissions module; kept for
    compatibility with existing callers.
    """
    for entry in datas:
        row = Firmware()
        row.version = entry["version"]
        row.location = entry["location"]
        row.architecture = entry["architecture"]
        row.sha256 = entry["sha256"]
        row.save(force_insert=True)
def query_firms(page=0, limit=1000, search=None):
    """Return a page of firmware rows, newest id first, optionally filtered
    by a version substring."""
    page = int(page or 0)
    limit = int(limit or 1000)
    query = Firmware.select()
    if search:
        query = query.where(Firmware.version ** ("%" + search + "%"))
    return query.paginate(page, limit).order_by(Firmware.id.desc())
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

158
py/libs/db/db_groups.py Normal file
View file

@ -0,0 +1,158 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_group.py: Models and functions for accsessing db related to Device groups
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import User,BaseModel,get_object_or_none
import logging
from libs.db.db_device import Devices
log = logging.getLogger("db_groups")
class DevGroups(BaseModel):
    """A named group of devices (table: device_groups)."""
    name = TextField()
    owner = ForeignKeyField(db_column='owner', null=True,
        model=User, to_field='id')
    created = DateTimeField()
    modified = DateTimeField()
    class Meta:
        db_table = 'device_groups'
def get_group(id):
    """Return DevGroups row by id, or None."""
    return get_object_or_none(DevGroups, id=id)
class DevGroupRel(BaseModel):
    """Join table linking devices to device groups (many-to-many)."""
    group_id = ForeignKeyField(db_column='group_id', null=True,
        model=DevGroups, to_field='id')
    device_id = ForeignKeyField(db_column='device_id', null=True,
        model=Devices, to_field='id')
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'device_groups_devices_rel'
        indexes = (
            # a device may appear in a group at most once
            (('group_id', 'device_id'), True),
        )
def create_group(name):
    """Create a device group.

    Returns the new group, or False when a group with the same name already
    exists (or the insert violates a constraint)."""
    try:
        # check if we have a group with same name
        if get_object_or_none(DevGroups, name=name):
            return False
        return DevGroups.create(name=name)
    except IntegrityError:
        return False
def update_group(id, name):
    """Rename group `id`; returns the updated group, or None when missing.

    Fix: get_group() returns None for unknown ids, which previously caused an
    AttributeError on `group.name = name`.
    """
    group = get_group(id)
    if group is None:
        return None
    group.name = name
    group.save()
    return group
def add_devices_to_group(group, devids):
    """Attach each device id in `devids` to `group`; existing pairs are
    ignored via the unique index on (group_id, device_id)."""
    rows = [{'group_id': group, 'device_id': devid} for devid in devids]
    return DevGroupRel.insert_many(rows).on_conflict_ignore().execute()
# Get groups of device
def devgroups(devid):
    """Return a query of DevGroups that contain device `devid`."""
    return (DevGroups
        .select()
        .join(DevGroupRel, on=DevGroupRel.group_id)
        .where(DevGroupRel.device_id == devid)
        .order_by(DevGroups.name))
# Get devices of group
def devs(groupid):
    """Return a query of Devices in group `groupid`.

    NOTE(review): shadowed by the second `devs` definition below (which
    returns a list of dicts) — this version is unreachable at module level.
    """
    return (Devices
        .select()
        .join(DevGroupRel, on=DevGroupRel.device_id)
        .where(DevGroupRel.group_id == groupid)
        .order_by(Devices.name))
# Get groups of device
def devgroups_api(devid):
    """Return groups of device `devid` as a list of dicts (API-friendly)."""
    return list(DevGroups
        .select()
        .join(DevGroupRel, on=DevGroupRel.group_id)
        .where(DevGroupRel.device_id == devid)
        .order_by(DevGroups.name).dicts())
# Get devices of group in dict
def devs(groupid):
    """Return devices of group `groupid` as a list of dicts.

    NOTE(review): re-defines `devs` above (module-level shadowing).
    """
    return list(Devices
        .select()
        .join(DevGroupRel, on=DevGroupRel.device_id)
        .where(DevGroupRel.group_id == groupid)
        .order_by(Devices.name).dicts())
# Get devices of group
def devs2(groupid):
    """Return devices of group `groupid` as a list of model instances."""
    return list(Devices
        .select()
        .join(DevGroupRel, on=DevGroupRel.device_id)
        .where(DevGroupRel.group_id == groupid)
        .order_by(Devices.name))
def get_devs_of_groups(group_ids):
    """Return devices belonging to any group in `group_ids`; [] on error."""
    try:
        return list(Devices
            .select()
            .join(DevGroupRel, on=DevGroupRel.device_id)
            .where(DevGroupRel.group_id << group_ids)
            .order_by(Devices.name))
    except Exception as e :
        log.error(e)
        return []
# get all groups including devices in each group
def query_groups_api():
    """Return all groups as dicts, each with an aggregated array of its
    device ids (Postgres array_agg; NULL-padded for empty groups).

    Fix: removed the unused `t3` DevGroups alias left over from an earlier
    version of the query.
    """
    q = DevGroups.select(
        DevGroups.id, DevGroups.name, DevGroups.created,
        fn.array_agg(DevGroupRel.device_id)
    ).join(
        DevGroupRel, JOIN.LEFT_OUTER,
        on=(DevGroupRel.group_id == DevGroups.id)
    ).order_by(DevGroups.id).group_by(DevGroups.id)
    return list(q.dicts())
def get_groups_by_id(ids):
    """Return the DevGroups rows whose id is in `ids` as a list, or False on
    error. (Docstring corrected: the previous one was copy-pasted from an
    unrelated "unique directors" raw-SQL example.)"""
    q=DevGroups.select().where(DevGroups.id << ids)
    try:
        q=list(q)
    except Exception as e :
        log.error(e)
        q=False
    return q
def delete_from_group(devids):
    """Detach every device in `devids` from all groups; returns the number
    of relation rows deleted."""
    return DevGroupRel.delete().where(DevGroupRel.device_id << devids).execute()
def delete_device(devid):
    """Remove a device and its group memberships. True on success."""
    try:
        delete_from_group([devid])
        device = get_object_or_none(Devices, id=devid)
        device.delete_instance(recursive=True)
        return True
    except Exception as e:
        log.error(e)
        return False
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

View file

@ -0,0 +1,74 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_permissions.py: Models and functions for accsessing db related to device permisions
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
import config
from libs.db.db import BaseModel,get_object_or_none
import logging
log = logging.getLogger("db_permisions")
class Perms(BaseModel):
    """A named set of device permissions (table: permissions)."""
    name = TextField()
    # serialized permission payload
    perms = TextField()
    created = DateTimeField()
    modified = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'permissions'
def get_perm(id):
    """Return Perms row by id, or None."""
    return get_object_or_none(Perms, id=id)
def create_perm(name,perms):
    """Insert a new permission row with the given name and perms payload."""
    row = Perms(name=name, perms=perms)
    row.save(force_insert=True)
def delete_perm(id):
    """Delete permission `id`. Built-in permissions (ids 1-3) are protected.

    Returns False when the id is protected or unknown.

    Fix: an unknown id previously made get_object_or_none return None and
    the following delete_instance raise AttributeError; now guarded.
    """
    if id in [1,2,3]:
        return False
    perm = get_object_or_none(Perms, id=id)
    if perm is None:
        return False
    perm.delete_instance(recursive=True)
def get_perm_by_name(name):
    """Return the permission row with the given name, or None."""
    if not name:
        return None
    try:
        # case insensitive query
        if config.IS_SQLITE:
            # NOTE(review): only the *input* is lower-cased; the stored column
            # is compared as-is, so this is case-insensitive only if names are
            # stored lower-case — confirm (same pattern as get_user_by_username).
            sql = "SELECT * FROM permissions where name = ? LIMIT 1"
            args = name.lower()
        else:
            sql = "SELECT * FROM permissions where LOWER(name) = LOWER(%s) LIMIT 1"
            args = (name,)
        return list(Perms.raw(sql, args))[0]
    except IndexError:
        # raw query returned no rows
        return None
def query_perms(page=0, limit=1000, search=None):
    """Return a page of permission rows, newest id first, optionally
    filtered by a name substring.

    Fix: the `search` parameter was accepted but silently ignored; it now
    filters on the permission name, matching query_users/query_firms.
    """
    page = int(page or 0)
    limit = int(limit or 1000)
    q = Perms.select()
    if search:
        q = q.where(Perms.name ** ("%" + search + "%"))
    q = q.paginate(page, limit).order_by(Perms.id.desc())
    return q
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

View file

@ -0,0 +1,79 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_sysconfig.py: Models and functions for accsessing db related to mikrowizard system configs
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import User,BaseModel,get_object_or_404
import logging
log = logging.getLogger("db_sysconfig")
class Sysconfig(BaseModel):
    """Key/value store of MikroWizard system settings (table: sysconfig)."""
    #id - automatic
    key = TextField()
    value = TextField()
    created_by = ForeignKeyField(db_column='created_by', null=True,
        model=User, to_field='id')
    created = DateTimeField()
    modified = DateTimeField()
    class Meta:
        db_table = 'sysconfig'
def get_default_user():
    """Return the 'default_user' sysconfig row (abort 404 when missing)."""
    return get_object_or_404(Sysconfig, key="default_user")
def get_all():
    """Return a query over every sysconfig row."""
    return Sysconfig.select()
def save_all(data):
    """Bulk upsert sysconfig rows; on key conflict keep value+modified."""
    Sysconfig.insert_many(data).on_conflict(conflict_target=['key'], preserve=(Sysconfig.value,Sysconfig.modified)).execute()
def get_default_password():
    """Return the 'default_password' sysconfig row (abort 404 when missing)."""
    return get_object_or_404(Sysconfig, key="default_password")
def update_sysconfig(key,value):
    """Update an existing key's value.

    NOTE(review): shadowed by the upsert-style update_sysconfig re-defined
    later in this module; this definition is unreachable at module level.
    """
    return Sysconfig.update(value=value).where(Sysconfig.key == key).execute()
def get_scan_mode():
    """Return the 'scan_mode' sysconfig row (abort 404 when missing)."""
    return get_object_or_404(Sysconfig, key="scan_mode")
def get_sysconfig(key):
    """Return just the value string for `key` (abort 404 when missing)."""
    return get_object_or_404(Sysconfig, key=key).value
def get_firmware_latest():
    """Return the 'latest_version' sysconfig row."""
    return get_object_or_404(Sysconfig, key="latest_version")
def get_firmware_action():
    """Return the 'old_firmware_action' sysconfig row."""
    return get_object_or_404(Sysconfig, key="old_firmware_action")
def get_firmware_old():
    """Return the 'old_version' sysconfig row."""
    return get_object_or_404(Sysconfig, key="old_version")
def get_mac_scan_interval():
    """Return the 'mac_scan_interval' sysconfig row."""
    return get_object_or_404(Sysconfig, key="mac_scan_interval")
def get_ip_scan_interval():
    """Return the 'ip_scan_interval' sysconfig row."""
    return get_object_or_404(Sysconfig, key="ip_scan_interval")
def update_sysconfig(key,value):
    """Upsert: insert key/value, overwriting the value when the key exists.

    NOTE(review): re-defines the plain-update update_sysconfig above and is
    identical to set_sysconfig below — candidates for consolidation.
    """
    return Sysconfig.insert(value=value,key=key).on_conflict(conflict_target=['key'], preserve=['key'], update={'value':value}).execute()
def set_sysconfig(key,value):
    """Upsert: insert key/value, overwriting the value when the key exists."""
    return Sysconfig.insert(value=value, key=key).on_conflict(conflict_target=['key'], preserve=['key'], update={'value':value}).execute()
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

47
py/libs/db/db_syslog.py Normal file
View file

@ -0,0 +1,47 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_syslog.py: Models and functions for accsessing db related to mikrowizard internal logs
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import User,BaseModel
import logging
log = logging.getLogger("db_syslog")
# --------------------------------------------------------------------------
# SysLog rows are the application's internal audit log: one row per user
# action, optionally linked to the acting user (user_id foreign key).
class SysLog(BaseModel):
    """MikroWizard application audit-log record (table: syslogs)."""
    user_id = ForeignKeyField(db_column='user_id', null=True, model=User, to_field='id')
    action = TextField()
    section = TextField()
    ip = TextField()
    # client user-agent string
    agent = TextField()
    data = TextField()
    created = DateTimeField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'syslogs'
def add_syslog_event(user_id,section,action,ip,agent,data):
    """Persist one audit-log row describing a user action."""
    SysLog(user_id=user_id, section=section, action=action, ip=ip,
           agent=agent, data=data).save()
# --------------------------------------------------------------------------
if __name__ == '__main__':
# quick adhoc tests
logging.basicConfig(level=logging.DEBUG)

73
py/libs/db/db_tasks.py Normal file
View file

@ -0,0 +1,73 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_tasks.py: Models and functions for accsessing db related to mikrowizard internal logs
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db import User,BaseModel
import logging
log = logging.getLogger("db_tasks")
class Tasks(BaseModel):
    """Background-job status flags (table: tasks)."""
    # numeric code identifying the job type (see the getters below)
    signal = TextField()
    starttime = DateTimeField()
    endtime = DateTimeField()
    status = BooleanField()
    class Meta:
        # `indexes` is a tuple of 2-tuples, where the 2-tuples are
        # a tuple of column names to index and a boolean indicating
        # whether the index is unique or not.
        db_table = 'tasks'
# Each background job is identified by a fixed numeric signal code; these
# helpers fetch the single status row per job and raise Tasks.DoesNotExist
# when the row is missing.
# NOTE(review): Tasks.signal is declared TextField but compared to integers
# here — relies on implicit casting; confirm the actual column type.
def update_check_status():
    """Status row of the job with signal 100 (update check)."""
    return (Tasks.select().where(Tasks.signal == 100).get())
def update_job_status():
    """Status row of the job with signal 110 (update job)."""
    return (Tasks.select().where(Tasks.signal == 110).get())
def backup_job_status():
    """Status row of the job with signal 120 (backup job)."""
    return (Tasks.select().where(Tasks.signal == 120).get())
# check status of scanner
def scanner_job_status():
    """Status row of the job with signal 130 (scanner)."""
    return (Tasks.select().where(Tasks.signal == 130).get())
# check status of downloader
def downloader_job_status():
    """Status row of the job with signal 140 (downloader)."""
    return (Tasks.select().where(Tasks.signal == 140).get())
def firmware_service_status():
    """Status row of the job with signal 150 (firmware service)."""
    return (Tasks.select().where(Tasks.signal == 150).get())
class TaskResults(BaseModel):
    """Stored outcome of a finished background task."""
    task_type = TextField()
    # NOTE(review): `result` is declared DateTimeField but add_task_result()
    # stores an arbitrary result value in it -- confirm the column type.
    result = DateTimeField()
    created = DateTimeField()
    class Meta:
        # Rows live in the `task_results` table.
        db_table = 'task_results'
def add_task_result(task_type, result):
    """Insert one `task_results` row recording *result* for *task_type*."""
    # NOTE(review): `created` is not set here; presumably the database
    # supplies a default -- confirm against the schema.
    row = TaskResults(task_type=task_type, result=result)
    row.save()
# --------------------------------------------------------------------------
if __name__ == '__main__':
    # quick adhoc tests: turn on verbose logging when run directly
    logging.basicConfig(level=logging.DEBUG)

View file

@ -0,0 +1,88 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_user_group_perm.py: Models and functions for accsessing db related to user groups relation permision
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db_device import Devices
from libs.db.db import User,BaseModel
from libs.db.db_permissions import Perms
from libs.db.db_groups import DevGroups,DevGroupRel
import logging
log = logging.getLogger("db_user_group_perm")
class DevUserGroupPermRel(BaseModel):
    """Link table: which user holds which permission on which device group."""
    user_id = ForeignKeyField(User, related_name='user_id')
    group_id = ForeignKeyField(DevGroups, related_name='group_id')
    perm_id = ForeignKeyField(Perms, related_name='perm_id')

    class Meta:
        db_table = 'user_group_perm_rel'

    def __str__(self):
        return "DevUserGroupPermRel: user_id: %s, group_id: %s, perm_id: %s" % (self.user_id, self.group_id, self.perm_id)

    # repr and str share the exact same rendering.
    __repr__ = __str__
def get_user_devices(uid,group_id):
    """Return a peewee query of the devices user *uid* may see.

    Group id 1 acts as the "all devices" group: a permission on group 1
    grants everything (optionally narrowed to *group_id* when one is given).
    Otherwise the result is joined through the user's group permissions.
    """
    perms=list(DevUserGroupPermRel.select().where(DevUserGroupPermRel.user_id == uid))
    for perm in perms:
        # Any permission row + group_id==1 means "all devices" was requested;
        # a perm on group 1 with no group filter also grants everything.
        if group_id==1 or (perm.group_id.id == 1 and not group_id):
            return Devices.select()
        elif perm.group_id.id == 1 and group_id:
            # Admin-wide permission, but the caller asked for one group only.
            return Devices.select().join(DevGroupRel).where(DevGroupRel.group_id == group_id)
    if group_id:
        return Devices.select().join(DevGroupRel).join(DevUserGroupPermRel,on=(DevUserGroupPermRel.group_id == DevGroupRel.group_id)).where(DevUserGroupPermRel.user_id == uid, DevGroupRel.group_id == group_id)
    return Devices.select().join(DevGroupRel).join(DevUserGroupPermRel,on=(DevUserGroupPermRel.group_id == DevGroupRel.group_id)).where(DevUserGroupPermRel.user_id == uid)
def get_user_devices_by_ids(uid,ids):
    """Return devices from *ids* that user *uid* is allowed to access.

    A permission on group 1 (the "all devices" group) short-circuits the
    group join and only filters by the requested ids.
    """
    perms=list(DevUserGroupPermRel.select().where(DevUserGroupPermRel.user_id == uid))
    for perm in perms:
        if perm.group_id.id == 1:
            return Devices.select().where(Devices.id << ids)
    return Devices.select().join(DevGroupRel).join(DevUserGroupPermRel,on=(DevUserGroupPermRel.group_id == DevGroupRel.group_id)).where(DevUserGroupPermRel.user_id == uid,Devices.id << ids)
def delete_group(gid):
    """Delete device group *gid* together with all of its relation rows.

    Returns True on success, False when the group does not exist or a
    database error occurred (the error is logged instead of being
    silently swallowed as before).
    """
    group = DevGroups.select().where(DevGroups.id == gid)
    if not group:
        return False
    try:
        # Remove membership rows first, then permission rows, then the
        # group itself, so no orphaned relations are left behind.
        DevGroupRel.delete().where(DevGroupRel.group_id == gid).execute()
        DevUserGroupPermRel.delete().where(DevUserGroupPermRel.group_id == gid).execute()
        DevGroups.delete().where(DevGroups.id == gid).execute()
        return True
    except Exception as e:
        log.error("delete_group(%s) failed: %s", gid, e)
        return False
def get_user_group_perms(uid):
    """Return a query of all group-permission rows belonging to user *uid*."""
    return DevUserGroupPermRel.select().where(DevUserGroupPermRel.user_id == uid)
def create_user_group_perm(user_id, group_id, perm_id):
    """Insert and return a new user/group/permission relation row."""
    return DevUserGroupPermRel.create(user_id=user_id, group_id=group_id, perm_id=perm_id)
def query_permission_by_user_and_device_group(uid , devgrupid):
    """Return permission rows of user *uid* whose group is in *devgrupid*.

    *devgrupid* must be an iterable of group ids (`<<` is peewee's IN).
    """
    q = DevUserGroupPermRel.select().where(DevUserGroupPermRel.group_id << devgrupid,DevUserGroupPermRel.user_id == uid)
    return (q)
def get_user_group_perm(id):
    """Return the relation row with primary key *id*, or False when missing.

    Catches only peewee's DoesNotExist instead of the previous bare
    ``except``, so real database errors are no longer hidden.
    """
    try:
        return DevUserGroupPermRel.select().where(DevUserGroupPermRel.id == id).get()
    except DoesNotExist:
        return False
def delete_user_group_perm(id):
    """Delete the relation row with primary key *id*.

    Returns the number of rows deleted, or False if the delete failed
    (the error is logged instead of being silently swallowed).
    """
    try:
        return DevUserGroupPermRel.delete().where(DevUserGroupPermRel.id == id).execute()
    except Exception as e:
        log.error("delete_user_group_perm(%s) failed: %s", id, e)
        return False

128
py/libs/db/db_user_tasks.py Normal file
View file

@ -0,0 +1,128 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# db_user_tasks.py: Models and functions for accsessing db related to user tasks
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from peewee import *
from libs.db.db_device import Devices
from libs.db.db import User,BaseModel,get_object_or_none
from libs.db.db_groups import DevGroups,get_devs_of_groups
import logging
log = logging.getLogger("db_user_tasks")
class Snippets(BaseModel):
    """A named, reusable script/text snippet."""
    name = TextField()
    description = TextField()
    content = TextField()     # the snippet body itself
    created = DateTimeField()
    class Meta:
        # Rows live in the `snippets` table.
        db_table = 'snippets'
def get_snippet_by_name(name):
    """Return the snippet named *name*, or None when it does not exist."""
    return get_object_or_none(Snippets, name=name)
def get_snippet(id):
    """Return the snippet with primary key *id*, or None when missing."""
    return get_object_or_none(Snippets, id=id)
def update_snippet(id, name, description, content):
    """Overwrite name/description/content of snippet *id* and persist it.

    NOTE(review): raises AttributeError when no snippet with *id* exists
    (get_object_or_none returns None) -- confirm callers pre-check.
    """
    snip = get_object_or_none(Snippets, id=id)
    snip.name = name
    snip.description = description
    snip.content = content
    snip.save()
def create_snippet(name, description, content):
    """Insert a new snippet row with the given fields."""
    row = Snippets(name=name, description=description, content=content)
    row.save()
def delete_snippet(id):
    """Remove snippet *id* (AttributeError when it does not exist)."""
    row = get_object_or_none(Snippets, id=id)
    row.delete_instance()
class UserTasks(BaseModel):
    """A user-defined scheduled task (cron-style) to run against devices."""
    name = TextField()
    description = TextField()
    desc_cron = TextField()          # human-readable description of the cron spec
    dev_ids = TextField()
    # Optional snippet executed by the task.
    snippetid = ForeignKeyField(db_column='snippetid', null=True,
                            model=Snippets, to_field='id')
    data = TextField()
    cron = TextField()               # cron expression
    action = TextField()
    task_type = TextField()
    selection_type = TextField()     # 'groups' or device-based (see get_task_devices)
    created = DateTimeField()
    class Meta:
        # Rows live in the `user_tasks` table.
        db_table = 'user_tasks'
def get_utask_by_id(tid):
    """Return the UserTasks row with primary key *tid*, or None when missing."""
    return get_object_or_none(UserTasks, id=tid)
class TaskDevRel(BaseModel):
    """Link table attaching a user task to either a device group or a device."""
    utask_id = ForeignKeyField(UserTasks, related_name='utask_id')
    group_id = ForeignKeyField(DevGroups, related_name='group_id')
    device_id = ForeignKeyField(Devices, related_name='device_id')
    class Meta:
        db_table = 'task_group_dev_rel'
def get_task_devices(task,return_devs=True):
    """Resolve the targets of *task*.

    For selection_type 'groups': returns the devices of the linked groups
    (or the group FKs themselves when return_devs is False).  Otherwise
    returns the directly linked device FKs.
    """
    members=[]
    members=list(TaskDevRel.select().where(TaskDevRel.utask_id == task.id).execute())
    devs=[]
    if task.selection_type=='groups':
        group_ids=[]
        for mem in members:
            try:
                # FK access can raise DoesNotExist when the referenced
                # group row has been deleted; such rows are skipped.
                group_ids.append(mem.group_id)
            except DoesNotExist as err:
                pass
        if return_devs:
            devs=get_devs_of_groups(group_ids)
        else:
            devs=group_ids
    else:
        for mem in members:
            try:
                # Same defensive skip for dangling device references.
                devs.append(mem.device_id)
            except DoesNotExist as err:
                pass
    return devs
def add_member_to_task(task_id, members, type='devices'):
    """Attach *members* (group ids or device ids) to user-task *task_id*.

    type: 'groups' links group ids, anything else links device ids
          (parameter name kept for backward compatibility although it
          shadows the builtin).
    Returns the insert-query result, or None when *members* is empty
    (insert_many cannot run with zero rows).
    """
    if not members:
        return None
    key = 'group_id' if type == 'groups' else 'device_id'
    data = [{'utask_id': task_id, key: member} for member in members]
    return TaskDevRel.insert_many(data).on_conflict_ignore().execute()
def delete_members(task_id):
    """Drop every group/device link of user-task *task_id*; returns the delete count."""
    return TaskDevRel.delete().where(TaskDevRel.utask_id == task_id).execute()
# --------------------------------------------------------------------------
if __name__ == '__main__':
    # quick adhoc tests: turn on verbose logging when run directly
    logging.basicConfig(level=logging.DEBUG)

150
py/libs/mschap3/U32.py Normal file
View file

@ -0,0 +1,150 @@
# U32.py implements 32-bit unsigned int class for Python
# Version 1.0
# Copyright (C) 2001-2002 Dmitry Rozmanov
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# e-mail: dima@xenon.spb.ru
#
#====================================================================
# Offset added to every stored value so `.v` stays positive.  C's low
# 32 bits are zero (C == 2**36), so norm() strips it back out.
C = 0x1000000000
#--------------------------------------------------------------------
def norm(n):
    """Reduce *n* modulo 2**32 (keep only the low 32 bits)."""
    return n & 0xFFFFFFFF
#====================================================================
class U32:
    """Emulated 32-bit unsigned integer.

    Values are stored as ``C + (value mod 2**32)`` so ``.v`` is always
    positive; ``norm()`` recovers the 32-bit value.  Arithmetic and
    bitwise operators wrap modulo 2**32 like a C ``unsigned long``.
    """
    v = 0  # class-level default; every instance overwrites it in __init__

    def __init__(self, value=0):
        self.v = C + norm(abs(int(value)))

    def set(self, value=0):
        """Re-assign the wrapped value in place."""
        self.v = C + norm(abs(int(value)))

    def __repr__(self):
        return hex(norm(self.v))

    def __long__(self):  # Python 2 legacy; ignored by Python 3
        return int(norm(self.v))

    def __int__(self):
        return int(norm(self.v))

    # Allow use as a sequence index and with hex()/bin().
    __index__ = __int__

    def __chr__(self):  # not a real dunder; kept for API compatibility
        return chr(norm(self.v))

    def __add__(self, b):
        r = U32()
        r.v = C + norm(self.v + b.v)
        return r

    def __sub__(self, b):
        r = U32()
        if self.v < b.v:
            # Borrow: wrap around like unsigned subtraction in C.
            r.v = C + norm(0x100000000 - (b.v - self.v))
        else:
            r.v = C + norm(self.v - b.v)
        return r

    def __mul__(self, b):
        r = U32()
        r.v = C + norm(self.v * b.v)
        return r

    def __div__(self, b):  # Python 2 legacy; '/' no longer dispatches here
        r = U32()
        r.v = C + (norm(self.v) / norm(b.v))
        return r

    def __mod__(self, b):
        r = U32()
        r.v = C + (norm(self.v) % norm(b.v))
        return r

    def __neg__(self):
        # BUGFIX: previously returned the value unchanged; now yields the
        # two's-complement negation, i.e. (2**32 - v) mod 2**32.
        return U32(norm(-norm(self.v)))

    def __pos__(self):
        return U32(self.v)   # U32(C + v) normalizes back to v

    def __abs__(self):
        return U32(self.v)   # unsigned: already non-negative

    def __invert__(self):
        r = U32()
        r.v = C + norm(~self.v)
        return r

    def __lshift__(self, b):
        r = U32()
        r.v = C + norm(self.v << b)
        return r

    def __rshift__(self, b):
        r = U32()
        r.v = C + (norm(self.v) >> b)
        return r

    def __and__(self, b):
        r = U32()
        r.v = C + norm(self.v & b.v)
        return r

    def __or__(self, b):
        r = U32()
        r.v = C + norm(self.v | b.v)
        return r

    def __xor__(self, b):
        r = U32()
        r.v = C + norm(self.v ^ b.v)
        return r

    def __not__(self):
        return U32(not norm(self.v))

    def truth(self):
        """Return the raw 32-bit value (non-zero means true)."""
        return norm(self.v)

    def __cmp__(self, b):  # Python 2 legacy; Python 3 ignores __cmp__
        if norm(self.v) > norm(b.v):
            return 1
        elif norm(self.v) < norm(b.v):
            return -1
        else:
            return 0

    def __bool__(self):
        # BUGFIX: __bool__ must return a bool; returning the raw int made
        # every truth test on a U32 raise TypeError under Python 3.
        return bool(norm(self.v))

View file

97
py/libs/mschap3/des.py Normal file
View file

@ -0,0 +1,97 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
from . import des_c
# ---------------------------------------------------------------------
class DES:
    """DES wrapper: expands a 7-byte (56-bit) key to 8 key bytes and
    delegates single-block ECB encryption/decryption to des_c.DES."""
    des_c_obj = None  # underlying des_c.DES instance built in __init__
    # -----------------------------------------------------------------
    def __init__(self, key_str):
        """Build the engine from a 7-byte key (bytes)."""
        k = str_to_key56(key_str)
        k = key56_to_key64(k)
        # Re-pack the 8 expanded key bytes into a bytes object for des_c.
        key_str = b""
        for i in k:
            key_str += bytes((i & 0xFF,))
        self.des_c_obj = des_c.DES(key_str)
    # -----------------------------------------------------------------
    def encrypt(self, plain_text):
        """Encrypt one 8-byte block; returns the raw ciphertext."""
        return self.des_c_obj.encrypt(plain_text)
    # -----------------------------------------------------------------
    def decrypt(self, crypted_text):
        """Decrypt one 8-byte block; returns the raw plaintext."""
        return self.des_c_obj.decrypt(crypted_text)
# ---------------------------------------------------------------------
# Some Helpers
# ---------------------------------------------------------------------
# Legacy string "exception" sentinel; not raised anywhere in this file.
DESException = "DESException"
# ---------------------------------------------------------------------
def str_to_key56(key_str):
    """Truncate or zero-pad *key_str* to a 7-byte DES-56 key.

    Accepts bytes (the normal case) or str; a str is encoded latin-1 so
    each character maps to exactly one byte.  (The old code only had a
    dead ``type(key_str) != type("")`` check and crashed on str input.)
    Returns a list of 7 ints.
    """
    if isinstance(key_str, str):
        key_str = key_str.encode("latin-1")
    if len(key_str) < 7:
        key_str = key_str + b"\000\000\000\000\000\000\000"[: (7 - len(key_str))]
    # Iterating bytes yields ints, so this is the 7-byte key as int list.
    return [b for b in key_str[:7]]
# ---------------------------------------------------------------------
def key56_to_key64(key_56):
    """Spread 7 key bytes into 8 bytes of 7 key bits each (DES key layout),
    then run the parity-bit pass over the result."""
    key = [
        key_56[0],
        ((key_56[0] << 7) & 0xFF) | (key_56[1] >> 1),
        ((key_56[1] << 6) & 0xFF) | (key_56[2] >> 2),
        ((key_56[2] << 5) & 0xFF) | (key_56[3] >> 3),
        ((key_56[3] << 4) & 0xFF) | (key_56[4] >> 4),
        ((key_56[4] << 3) & 0xFF) | (key_56[5] >> 5),
        ((key_56[5] << 2) & 0xFF) | (key_56[6] >> 6),
        (key_56[6] << 1) & 0xFF,
    ]
    return set_key_odd_parity(key)
# ---------------------------------------------------------------------
def set_key_odd_parity(key):
    """Write a parity bit into the LSB of every key byte, in place.

    NOTE(review): `bit` is reset to 0 on every inner iteration, so the
    value finally written is effectively just bit 6 of the byte rather
    than a true odd-parity bit.  DES itself ignores the parity bits, so
    encryption is unaffected -- but confirm before reusing this helper.
    """
    for i in range(len(key)):
        for k in range(7):
            bit = 0
            t = key[i] >> k
            bit = (t ^ bit) & 0x1
            key[i] = (key[i] & 0xFE) | bit
    return key

358
py/libs/mschap3/des_c.py Normal file
View file

@ -0,0 +1,358 @@
# This file is part of 'NTLM Authorization Proxy Server' http://sourceforge.net/projects/ntlmaps/
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
from .U32 import U32
from .des_data import des_SPtrans, des_skb
# --NON ASCII COMMENT ELIDED--
# typedef unsigned char des_cblock[8];
# define HDRSIZE 4
def c2l(c):
    """Assemble four little-endian bytes c[0..3] into one U32."""
    result = U32(c[0])
    for idx, shift in ((1, 8), (2, 16), (3, 24)):
        result = result | (U32(c[idx]) << shift)
    return result
def c2ln(c, l1, l2, n):
    """Pack the first *n* bytes of *c* into two U32 halves (port of the C
    c2ln macro).

    NOTE(review): ``c = c + n`` mimics C pointer advancement but raises
    TypeError when *c* is a list (list + int); nothing in this module
    calls c2ln, so it appears to be dead/broken code -- confirm before use.
    """
    c = c + n
    l1, l2 = U32(0), U32(0)
    # Fall-through chain: once a size matched, every smaller byte is packed too.
    f = 0
    if n == 8:
        l2 = l2 | (U32(c[7]) << 24)
        f = 1
    if f or (n == 7):
        l2 = l2 | (U32(c[6]) << 16)
        f = 1
    if f or (n == 6):
        l2 = l2 | (U32(c[5]) << 8)
        f = 1
    if f or (n == 5):
        l2 = l2 | U32(c[4])
        f = 1
    if f or (n == 4):
        l1 = l1 | (U32(c[3]) << 24)
        f = 1
    if f or (n == 3):
        l1 = l1 | (U32(c[2]) << 16)
        f = 1
    if f or (n == 2):
        l1 = l1 | (U32(c[1]) << 8)
        f = 1
    if f or (n == 1):
        l1 = l1 | U32(c[0])
    return (l1, l2)
def l2c(l):
    """Split U32 *l* into a list of four little-endian byte values."""
    return [
        int(l & U32(0xFF)),
        int((l >> 8) & U32(0xFF)),
        int((l >> 16) & U32(0xFF)),
        int((l >> 24) & U32(0xFF)),
    ]
def n2l(c, l):
    """Assemble four network-order (big-endian) bytes into a U32.

    The incoming *l* is ignored and rebuilt from scratch (kept in the
    signature for parity with the original C macro).
    """
    l = (U32(c[0] << 24)) | (U32(c[1]) << 16) | (U32(c[2]) << 8) | (U32(c[3]))
    return l
def l2n(l, c):
    """Split U32 *l* into four network-order (big-endian) byte values.

    The incoming *c* is ignored and rebuilt from scratch (kept in the
    signature for parity with the original C macro).
    """
    return [
        int((l >> 24) & U32(0xFF)),
        int((l >> 16) & U32(0xFF)),
        int((l >> 8) & U32(0xFF)),
        int(l & U32(0xFF)),
    ]
def l2cn(l1, l2, c, n):
    """Unpack two U32 halves into the first *n* bytes of list *c*
    (little-endian; port of the C l2cn macro)."""
    for i in range(n):
        c.append(0x00)
    # Fall-through chain mirroring c2ln: once a size matches, all
    # lower-indexed bytes are written too.
    f = 0
    if f or (n == 8):
        c[7] = int((l2 >> 24) & U32(0xFF))
        f = 1
    if f or (n == 7):
        c[6] = int((l2 >> 16) & U32(0xFF))
        f = 1
    if f or (n == 6):
        c[5] = int((l2 >> 8) & U32(0xFF))
        f = 1
    if f or (n == 5):
        c[4] = int((l2) & U32(0xFF))
        f = 1
    if f or (n == 4):
        c[3] = int((l1 >> 24) & U32(0xFF))
        f = 1
    if f or (n == 3):
        c[2] = int((l1 >> 16) & U32(0xFF))
        f = 1
    if f or (n == 2):
        c[1] = int((l1 >> 8) & U32(0xFF))
        f = 1
    if f or (n == 1):
        c[0] = int((l1) & U32(0xFF))
        f = 1
    return c[:n]
# array of data
# static unsigned long des_SPtrans[8][64]={
# static unsigned long des_skb[8][64]={
def D_ENCRYPT(tup, u, t, s):
    """One DES round: mix R with two subkey words from *s* at offset S and
    fold the S-box (des_SPtrans) output into L.  Returns the updated
    ((L, R, S), u, t, s) tuple (port of the C D_ENCRYPT macro)."""
    L, R, S = tup
    u = R ^ s[S]
    t = R ^ s[S + 1]
    # Rotate t right by 4 (addition works because U32 wraps mod 2**32).
    t = (t >> 4) + (t << 28)
    L = L ^ (
        des_SPtrans[1][int((t) & U32(0x3F))]
        | des_SPtrans[3][int((t >> 8) & U32(0x3F))]
        | des_SPtrans[5][int((t >> 16) & U32(0x3F))]
        | des_SPtrans[7][int((t >> 24) & U32(0x3F))]
        | des_SPtrans[0][int((u) & U32(0x3F))]
        | des_SPtrans[2][int((u >> 8) & U32(0x3F))]
        | des_SPtrans[4][int((u >> 16) & U32(0x3F))]
        | des_SPtrans[6][int((u >> 24) & U32(0x3F))]
    )
    return ((L, R, S), u, t, s)
def PERM_OP(tup, n, m):
    """DES PERM_OP macro: exchange the bits of a and b selected by mask *m*
    at bit distance *n*.  tup is (a, b, t); returns the updated (a, b, t)."""
    a, b, t = tup
    t = ((a >> n) ^ b) & m
    return (a ^ (t << n), b ^ t, t)
def HPERM_OP(tup, n, m):
    """DES HPERM_OP macro: swap bits within *a* that are (16 - n) apart,
    selected by mask *m*.  tup is (a, t); returns the updated (a, t)."""
    a, t = tup
    d = 16 - n
    t = ((a << d) ^ a) & m
    return (a ^ t ^ (t >> d), t)
# Per-round key-schedule rotation amounts used in des_set_key():
# entry 1 -> rotate by 2, entry 0 -> rotate by 1.
shifts2 = [0, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0]
class DES:
    """Single-block DES in pure Python (port of the C implementation).

    Works on 8-byte blocks: bytes in, bytes out.
    """

    KeySched = None  # des_key_schedule: list of U32 subkey words

    def __init__(self, key_str):
        # key_str: the 8 key bytes; the schedule is computed once and reused.
        self.KeySched = des_set_key(key_str)

    def decrypt(self, str):
        """Decrypt one 8-byte block (bytes in, bytes out).

        BUGFIX: the old implementation called ord() on integer byte
        values and concatenated str characters onto a bytes object, so
        it always raised TypeError under Python 3; it now mirrors
        encrypt().
        """
        block = [b for b in str]
        block = des_ecb_encrypt(block, self.KeySched, 0)
        return bytes(block)

    def encrypt(self, str):
        """Encrypt one 8-byte block (bytes in, bytes out)."""
        block = [b for b in str]
        block = des_ecb_encrypt(block, self.KeySched, 1)
        return bytes(block)
# ------------------------
def des_encript(input, ks, encrypt):
    """Core DES transform on one block.

    input:   [l, r] -- two U32 half-blocks
    ks:      key schedule from des_set_key()
    encrypt: 1 to encrypt (subkeys forward), 0 to decrypt (subkeys reversed)
    Returns the transformed [l, r] pair.
    (Function name typo "encript" is kept: callers depend on it.)
    """
    l = input[0]
    r = input[1]
    t = U32(0)
    u = U32(0)
    # Initial permutation (IP), expressed as the usual PERM_OP sequence.
    r, l, t = PERM_OP((r, l, t), 4, U32(0x0F0F0F0F))
    l, r, t = PERM_OP((l, r, t), 16, U32(0x0000FFFF))
    r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333))
    l, r, t = PERM_OP((l, r, t), 8, U32(0x00FF00FF))
    r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555))
    # Rotate both halves left by one bit.
    t = (r << 1) | (r >> 31)
    r = (l << 1) | (l >> 31)
    l = t
    s = ks  # per-round subkey words
    if encrypt:
        # 16 rounds, two per loop iteration, subkeys in forward order.
        for i in range(0, 32, 4):
            rtup, u, t, s = D_ENCRYPT((l, r, i + 0), u, t, s)
            l = rtup[0]
            r = rtup[1]
            rtup, u, t, s = D_ENCRYPT((r, l, i + 2), u, t, s)
            r = rtup[0]
            l = rtup[1]
    else:
        # Decryption: same rounds with the subkeys in reverse order.
        for i in range(30, 0, -4):
            rtup, u, t, s = D_ENCRYPT((l, r, i - 0), u, t, s)
            l = rtup[0]
            r = rtup[1]
            rtup, u, t, s = D_ENCRYPT((r, l, i - 2), u, t, s)
            r = rtup[0]
            l = rtup[1]
    # Undo the one-bit rotation.
    l = (l >> 1) | (l << 31)
    r = (r >> 1) | (r << 31)
    # Final permutation (inverse of the sequence above).
    r, l, t = PERM_OP((r, l, t), 1, U32(0x55555555))
    l, r, t = PERM_OP((l, r, t), 8, U32(0x00FF00FF))
    r, l, t = PERM_OP((r, l, t), 2, U32(0x33333333))
    l, r, t = PERM_OP((l, r, t), 16, U32(0x0000FFFF))
    r, l, t = PERM_OP((r, l, t), 4, U32(0x0F0F0F0F))
    output = [l]
    output.append(r)
    # Scrub the working registers, mirroring the C original.
    l, r, t, u = U32(0), U32(0), U32(0), U32(0)
    return output
def des_ecb_encrypt(input, ks, encrypt):
    """ECB wrapper: pack 8 input bytes into two U32 halves, run
    des_encript(), and unpack the result back into 8 byte values.

    input:   list of 8 ints (one block)
    ks:      key schedule from des_set_key()
    encrypt: 1 to encrypt, 0 to decrypt
    Returns a list of 8 ints.
    """
    l0 = c2l(input[0:4])
    l1 = c2l(input[4:8])
    ll = [l0]
    ll.append(l1)
    ll = des_encript(ll, ks, encrypt)
    l0 = ll[0]
    l1 = ll[1]
    output = l2c(l0)
    output = output + l2c(l1)
    # Scrub intermediates, mirroring the C original.
    l0, l1, ll[0], ll[1] = U32(0), U32(0), U32(0), U32(0)
    return output
def des_set_key(key):
    """Expand an 8-byte DES key into the round-subkey schedule.

    key: 8 key bytes (indexable as ints).
    Returns a list of 32 U32 words -- two per round, consumed by
    D_ENCRYPT() via des_encript().
    """
    k = []
    # Split the key into two 32-bit halves.
    c = c2l(key[0:4])
    d = c2l(key[4:8])
    t = U32(0)
    # PC-1 style bit shuffling of the two halves.
    d, c, t = PERM_OP((d, c, t), 4, U32(0x0F0F0F0F))
    c, t = HPERM_OP((c, t), -2, U32(0xCCCC0000))
    d, t = HPERM_OP((d, t), -2, U32(0xCCCC0000))
    d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555))
    c, d, t = PERM_OP((c, d, t), 8, U32(0x00FF00FF))
    d, c, t = PERM_OP((d, c, t), 1, U32(0x55555555))
    d = (
        ((d & U32(0x000000FF)) << 16)
        | (d & U32(0x0000FF00))
        | ((d & U32(0x00FF0000)) >> 16)
        | ((c & U32(0xF0000000)) >> 4)
    )
    c = c & U32(0x0FFFFFFF)
    for i in range(16):
        # Rotate both 28-bit halves by 1 or 2 bits per round (shifts2).
        if shifts2[i]:
            c = (c >> 2) | (c << 26)
            d = (d >> 2) | (d << 26)
        else:
            c = (c >> 1) | (c << 27)
            d = (d >> 1) | (d << 27)
        c = c & U32(0x0FFFFFFF)
        d = d & U32(0x0FFFFFFF)
        # PC-2: select and permute subkey bits via the des_skb tables.
        s = (
            des_skb[0][int((c) & U32(0x3F))]
            | des_skb[1][int(((c >> 6) & U32(0x03)) | ((c >> 7) & U32(0x3C)))]
            | des_skb[2][int(((c >> 13) & U32(0x0F)) | ((c >> 14) & U32(0x30)))]
            | des_skb[3][
                int(
                    ((c >> 20) & U32(0x01))
                    | ((c >> 21) & U32(0x06))
                    | ((c >> 22) & U32(0x38))
                )
            ]
        )
        t = (
            des_skb[4][int((d) & U32(0x3F))]
            | des_skb[5][int(((d >> 7) & U32(0x03)) | ((d >> 8) & U32(0x3C)))]
            | des_skb[6][int((d >> 15) & U32(0x3F))]
            | des_skb[7][int(((d >> 21) & U32(0x0F)) | ((d >> 22) & U32(0x30)))]
        )
        # Emit the two 32-bit subkey words for this round.
        k.append(((t << 16) | (s & U32(0x0000FFFF))) & U32(0xFFFFFFFF))
        s = (s >> 16) | (t & U32(0xFFFF0000))
        s = (s << 4) | (s >> 28)
        k.append(s & U32(0xFFFFFFFF))
    schedule = k
    return schedule

1098
py/libs/mschap3/des_data.py Normal file

File diff suppressed because it is too large Load diff

253
py/libs/mschap3/md4.py Executable file
View file

@ -0,0 +1,253 @@
# md4.py implements md4 hash class for Python
# Version 1.0
# Copyright (C) 2001-2002 Dmitry Rozmanov
#
# based on md4.c from "the Python Cryptography Toolkit, version 1.0.0
# Copyright (C) 1995, A.M. Kuchling"
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# e-mail: dima@xenon.spb.ru
#
#====================================================================
# MD4 validation data
# (message, expected digest as an int) pairs -- the RFC 1320 test suite.
md4_test= [
    ('', 0x31d6cfe0d16ae931b73c59d7e0c089c0),
    ("a", 0xbde52cb31de33e46245e05fbdbd6fb24),
    ("abc", 0xa448017aaf21d8525fc10ae87aa6729d),
    ("message digest", 0xd9130a8164549fe818874806e1c7014b),
    ("abcdefghijklmnopqrstuvwxyz", 0xd79e1c308aa5bbcdeea8ed63df412da9),
    ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789",
     0x043f8582f241db351ce627e153e7f0e4),
    ("12345678901234567890123456789012345678901234567890123456789012345678901234567890",
     0xe33b4ddc9c38f2199c3e7b164fcc0536),
    ]
#====================================================================
from .U32 import U32
#--------------------------------------------------------------------
class MD4:
    """Pure-Python MD4 digest with an update()/digest() interface.

    State is held in four U32 words A..D; the 64-bit message bit length
    is split across len1 (low 32 bits) and len2 (high 32 bits); `count`
    is the number of bytes currently buffered in `buf`.
    """
    A = None
    B = None
    C = None
    D = None
    count, len1, len2 = None, None, None
    buf = []
    #-----------------------------------------------------
    def __init__(self):
        # RFC 1320 initial state constants.
        self.A = U32(0x67452301)
        self.B = U32(0xefcdab89)
        self.C = U32(0x98badcfe)
        self.D = U32(0x10325476)
        self.count, self.len1, self.len2 = U32(0), U32(0), U32(0)
        self.buf = [0x00] * 64
    #-----------------------------------------------------
    def __repr__(self):
        """Debug dump of the full hash state, including the buffer."""
        r = 'A = %s, \nB = %s, \nC = %s, \nD = %s.\n' % (self.A.__repr__(), self.B.__repr__(), self.C.__repr__(), self.D.__repr__())
        r = r + 'count = %s, \nlen1 = %s, \nlen2 = %s.\n' % (self.count.__repr__(), self.len1.__repr__(), self.len2.__repr__())
        for i in range(4):
            for j in range(16):
                r = r + '%4s ' % hex(self.buf[i+j])
            r = r + '\n'
        return r
    #-----------------------------------------------------
    def make_copy(self):
        """Return an independent snapshot of the current hash state
        (used by digest() so finalization does not disturb this object)."""
        dest = new()
        dest.len1 = self.len1
        dest.len2 = self.len2
        dest.A = self.A
        dest.B = self.B
        dest.C = self.C
        dest.D = self.D
        dest.count = self.count
        for i in range(int(self.count)):
            dest.buf[i] = self.buf[i]
        return dest
    #-----------------------------------------------------
    def update(self, str):
        """Feed *str* (a text string; each char is one byte via ord())
        into the hash, compressing every full 64-byte block."""
        buf = []
        for i in str: buf.append(ord(i))
        ilen = U32(len(buf))
        # check if the first length is out of range
        # as the length is measured in bits then multiplay it by 8
        if (int(self.len1 + (ilen << 3)) < int(self.len1)):
            self.len2 = self.len2 + U32(1)
        self.len1 = self.len1 + (ilen << 3)
        self.len2 = self.len2 + (ilen >> 29)
        L = U32(0)
        bufpos = 0
        while (int(ilen) > 0):
            # Copy at most what fits into the 64-byte block buffer.
            if (64 - int(self.count)) < int(ilen): L = U32(64 - int(self.count))
            else: L = ilen
            for i in range(int(L)): self.buf[i + int(self.count)] = buf[i + bufpos]
            self.count = self.count + L
            ilen = ilen - L
            bufpos = bufpos + int(L)
            if (int(self.count) == 64):
                # Full block: decode 16 little-endian words and run the
                # three MD4 rounds (f1/f2/f3).
                self.count = U32(0)
                X = []
                i = 0
                for j in range(16):
                    X.append(U32(self.buf[i]) + (U32(self.buf[i+1]) << 8) + \
                    (U32(self.buf[i+2]) << 16) + (U32(self.buf[i+3]) << 24))
                    i = i + 4
                A = self.A
                B = self.B
                C = self.C
                D = self.D
                A = f1(A,B,C,D, 0, 3, X)
                D = f1(D,A,B,C, 1, 7, X)
                C = f1(C,D,A,B, 2,11, X)
                B = f1(B,C,D,A, 3,19, X)
                A = f1(A,B,C,D, 4, 3, X)
                D = f1(D,A,B,C, 5, 7, X)
                C = f1(C,D,A,B, 6,11, X)
                B = f1(B,C,D,A, 7,19, X)
                A = f1(A,B,C,D, 8, 3, X)
                D = f1(D,A,B,C, 9, 7, X)
                C = f1(C,D,A,B,10,11, X)
                B = f1(B,C,D,A,11,19, X)
                A = f1(A,B,C,D,12, 3, X)
                D = f1(D,A,B,C,13, 7, X)
                C = f1(C,D,A,B,14,11, X)
                B = f1(B,C,D,A,15,19, X)
                A = f2(A,B,C,D, 0, 3, X)
                D = f2(D,A,B,C, 4, 5, X)
                C = f2(C,D,A,B, 8, 9, X)
                B = f2(B,C,D,A,12,13, X)
                A = f2(A,B,C,D, 1, 3, X)
                D = f2(D,A,B,C, 5, 5, X)
                C = f2(C,D,A,B, 9, 9, X)
                B = f2(B,C,D,A,13,13, X)
                A = f2(A,B,C,D, 2, 3, X)
                D = f2(D,A,B,C, 6, 5, X)
                C = f2(C,D,A,B,10, 9, X)
                B = f2(B,C,D,A,14,13, X)
                A = f2(A,B,C,D, 3, 3, X)
                D = f2(D,A,B,C, 7, 5, X)
                C = f2(C,D,A,B,11, 9, X)
                B = f2(B,C,D,A,15,13, X)
                A = f3(A,B,C,D, 0, 3, X)
                D = f3(D,A,B,C, 8, 9, X)
                C = f3(C,D,A,B, 4,11, X)
                B = f3(B,C,D,A,12,15, X)
                A = f3(A,B,C,D, 2, 3, X)
                D = f3(D,A,B,C,10, 9, X)
                C = f3(C,D,A,B, 6,11, X)
                B = f3(B,C,D,A,14,15, X)
                A = f3(A,B,C,D, 1, 3, X)
                D = f3(D,A,B,C, 9, 9, X)
                C = f3(C,D,A,B, 5,11, X)
                B = f3(B,C,D,A,13,15, X)
                A = f3(A,B,C,D, 3, 3, X)
                D = f3(D,A,B,C,11, 9, X)
                C = f3(C,D,A,B, 7,11, X)
                B = f3(B,C,D,A,15,15, X)
                self.A = self.A + A
                self.B = self.B + B
                self.C = self.C + C
                self.D = self.D + D
    #-----------------------------------------------------
    def digest(self):
        """Finalize on a copy of the state (so update() can continue)
        and return the 16-byte digest as a str of byte characters."""
        res = [0x00] * 16
        s = [0x00] * 8
        padding = [0x00] * 64
        padding[0] = 0x80
        padlen, oldlen1, oldlen2 = U32(0), U32(0), U32(0)
        temp = self.make_copy()
        oldlen1 = temp.len1
        oldlen2 = temp.len2
        # Pad so the buffered length becomes 56 bytes mod 64, leaving room
        # for the 8-byte bit-length trailer.
        if (56 <= int(self.count)): padlen = U32(56 - int(self.count) + 64)
        else: padlen = U32(56 - int(self.count))
        temp.update(int_array2str(padding[:int(padlen)]))
        # Append the original bit length, little-endian, low word first.
        s[0]= (oldlen1) & U32(0xFF)
        s[1]=((oldlen1) >> 8) & U32(0xFF)
        s[2]=((oldlen1) >> 16) & U32(0xFF)
        s[3]=((oldlen1) >> 24) & U32(0xFF)
        s[4]= (oldlen2) & U32(0xFF)
        s[5]=((oldlen2) >> 8) & U32(0xFF)
        s[6]=((oldlen2) >> 16) & U32(0xFF)
        s[7]=((oldlen2) >> 24) & U32(0xFF)
        temp.update(int_array2str(s))
        # Serialize A..D little-endian into the 16 result bytes.
        res[ 0]= temp.A & U32(0xFF)
        res[ 1]=(temp.A >> 8) & U32(0xFF)
        res[ 2]=(temp.A >> 16) & U32(0xFF)
        res[ 3]=(temp.A >> 24) & U32(0xFF)
        res[ 4]= temp.B & U32(0xFF)
        res[ 5]=(temp.B >> 8) & U32(0xFF)
        res[ 6]=(temp.B >> 16) & U32(0xFF)
        res[ 7]=(temp.B >> 24) & U32(0xFF)
        res[ 8]= temp.C & U32(0xFF)
        res[ 9]=(temp.C >> 8) & U32(0xFF)
        res[10]=(temp.C >> 16) & U32(0xFF)
        res[11]=(temp.C >> 24) & U32(0xFF)
        res[12]= temp.D & U32(0xFF)
        res[13]=(temp.D >> 8) & U32(0xFF)
        res[14]=(temp.D >> 16) & U32(0xFF)
        res[15]=(temp.D >> 24) & U32(0xFF)
        return int_array2str(res)
#====================================================================
# helpers
def F(x, y, z):
    """MD4 round-1 mix: choose bits of y or z according to x."""
    return (x & y) | (~x & z)

def G(x, y, z):
    """MD4 round-2 mix: bitwise majority of x, y, z."""
    return (x & y) | (x & z) | (y & z)

def H(x, y, z):
    """MD4 round-3 mix: parity (XOR) of x, y, z."""
    return x ^ y ^ z

def ROL(x, n):
    """Rotate x left by n bits (true 32-bit rotate when x is a U32)."""
    return (x << n) | (x >> (32 - n))

def f1(a, b, c, d, k, s, X):
    """Round-1 step: a = (a + F(b,c,d) + X[k]) <<< s."""
    return ROL(a + F(b, c, d) + X[k], s)

def f2(a, b, c, d, k, s, X):
    """Round-2 step, with the round constant 0x5a827999."""
    return ROL(a + G(b, c, d) + X[k] + U32(0x5a827999), s)

def f3(a, b, c, d, k, s, X):
    """Round-3 step, with the round constant 0x6ed9eba1."""
    return ROL(a + H(b, c, d) + X[k] + U32(0x6ed9eba1), s)
#--------------------------------------------------------------------
# helper function
def int_array2str(array):
    """Map a list of byte values to the str of corresponding characters."""
    return "".join(chr(i) for i in array)
#--------------------------------------------------------------------
# Factory alias so callers can write md4.new() instead of md4.MD4(),
# matching the usual hash-module constructor name.
new = MD4

271
py/libs/mschap3/mppe.py Executable file
View file

@ -0,0 +1,271 @@
from . import mschap
import hashlib
import random
# 40 zero bytes -- the SHSpad1 padding constant used in MPPE key derivation.
SHSpad1 = \
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + \
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + \
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + \
b"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
# 40 bytes of 0xF2 -- the SHSpad2 padding constant.
SHSpad2 = \
b"\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2" + \
b"\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2" + \
b"\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2" + \
b"\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2\xf2"
# ASCII for "This is the MPPE Master Key" (master-key derivation magic).
Magic1 = \
b"\x54\x68\x69\x73\x20\x69\x73\x20\x74" + \
b"\x68\x65\x20\x4d\x50\x50\x45\x20\x4d" + \
b"\x61\x73\x74\x65\x72\x20\x4b\x65\x79"
# ASCII magic selecting the client->server (send) key direction.
Magic2 = \
b"\x4f\x6e\x20\x74\x68\x65\x20\x63\x6c\x69" + \
b"\x65\x6e\x74\x20\x73\x69\x64\x65\x2c\x20" + \
b"\x74\x68\x69\x73\x20\x69\x73\x20\x74\x68" + \
b"\x65\x20\x73\x65\x6e\x64\x20\x6b\x65\x79" + \
b"\x3b\x20\x6f\x6e\x20\x74\x68\x65\x20\x73" + \
b"\x65\x72\x76\x65\x72\x20\x73\x69\x64\x65" + \
b"\x2c\x20\x69\x74\x20\x69\x73\x20\x74\x68" + \
b"\x65\x20\x72\x65\x63\x65\x69\x76\x65\x20" + \
b"\x6b\x65\x79\x2e"
# ASCII magic selecting the client<-server (receive) key direction.
Magic3 = \
b"\x4f\x6e\x20\x74\x68\x65\x20\x63\x6c\x69" + \
b"\x65\x6e\x74\x20\x73\x69\x64\x65\x2c\x20" + \
b"\x74\x68\x69\x73\x20\x69\x73\x20\x74\x68" + \
b"\x65\x20\x72\x65\x63\x65\x69\x76\x65\x20" + \
b"\x6b\x65\x79\x3b\x20\x6f\x6e\x20\x74\x68" + \
b"\x65\x20\x73\x65\x72\x76\x65\x72\x20\x73" + \
b"\x69\x64\x65\x2c\x20\x69\x74\x20\x69\x73" + \
b"\x20\x74\x68\x65\x20\x73\x65\x6e\x64\x20" + \
b"\x6b\x65\x79\x2e"
def mppe_chap2_gen_keys(password, nt_response, nthash=False):
    """Derive the MPPE master send/receive keys for an MS-CHAP-v2 session.

    Either ``password`` (clear text) or ``nthash`` (hex-encoded NT hash) is
    used to obtain the password hash; that hash is hashed again with MD4 and
    combined with the 24-octet ``nt_response`` into the 16-octet master key,
    from which the asymmetric send and receive start keys are derived.

    Returns a ``(master_send_key, master_recv_key)`` tuple.
    """
    if nthash:
        # Caller supplied the NT hash directly; map bytes 1:1 onto chars.
        password_hash = bytes.fromhex(nthash).decode('iso8859-1', errors='ignore')
    else:
        password_hash = mschap.nt_password_hash(password)
    password_hash_hash = mschap.hash_nt_password_hash(password_hash).encode()
    master_key = get_master_key(password_hash_hash, nt_response)
    master_send_key = get_asymetric_start_key(master_key, 16, True, True)
    master_recv_key = get_asymetric_start_key(master_key, 16, False, True)
    return master_send_key, master_recv_key
def get_master_key(password_hash_hash, nt_response):
    """GetMasterKey: SHA-1 over PasswordHashHash | NTResponse | Magic1,
    truncated to the first 16 octets."""
    digest = hashlib.sha1(password_hash_hash + nt_response + Magic1).digest()
    return digest[:16]
def get_asymetric_start_key(master_key, session_key_length, is_send, is_server):
    """GetAsymetricStartKey: derive a directional session start key.

    The magic constant depends on the direction: the server's send key and
    the client's receive key use Magic3, the opposite pair uses Magic2.
    The result is SHA-1(MasterKey | SHSpad1 | magic | SHSpad2) truncated to
    ``session_key_length`` octets.
    """
    # Magic3 when the two direction flags agree, Magic2 otherwise — this
    # reproduces the original four-way if/else table exactly.
    magic = Magic3 if bool(is_send) == bool(is_server) else Magic2
    digest = hashlib.sha1(master_key + SHSpad1 + magic + SHSpad2).digest()
    return digest[:session_key_length]
def create_plain_text(key):
    """Build the key plaintext: a length octet followed by the key,
    NUL-padded so the total length is a multiple of 16."""
    original_length = len(key)
    text = key.decode(errors='ignore')
    while (len(text) + 1) % 16:
        text += "\000"
    return chr(original_length) + text
def create_salts():
    """Return two distinct two-character salts: ``(send_salt, recv_salt)``."""
    send = create_salt()
    recv = create_salt()
    # The two salts must differ; redraw the receive salt on collision.
    while recv == send:
        recv = create_salt()
    return (send, recv)
def create_salt():
    """Return a random 2-character salt whose first char has the high bit set."""
    high = chr(random.randrange(128, 256))  # 0x80..0xFF
    low = chr(random.randrange(0, 256))     # 0x00..0xFF
    return high + low
def gen_radius_encrypt_keys(send_key, recv_key, secret, request_authenticator):
    """Encrypt the MPPE send/recv keys for transport in RADIUS attributes.

    Each key is wrapped as ``salt + encrypted(plaintext)``; returns the
    ``(encrypted_send_key, encrypted_recv_key)`` pair.
    """
    send_salt, recv_salt = create_salts()

    def _wrap(key, salt):
        # One salted, encrypted String field per key.
        return salt + radius_encrypt_keys(
            create_plain_text(key), secret, request_authenticator, salt
        )

    return _wrap(send_key, send_salt), _wrap(recv_key, recv_salt)
def radius_encrypt_keys(plain_text, secret, request_authenticator, salt):
    """Encrypt an MPPE key String field (RFC 2548 section 2.4.2/2.4.3).

    ``plain_text`` (P) is broken into 16-octet chunks p(1)..p(i) and chained:

        b(1) = MD5(S + R + A)      c(1) = p(1) xor b(1)
        b(n) = MD5(S + c(n-1))     c(n) = p(n) xor b(n)

    where S is the shared ``secret``, R the ``request_authenticator`` and A
    the ``salt``. Returns the concatenated ciphertext c(1)+...+c(i).

    Fix: the original wrote ``hashlib.new("md5", secret + c.encode().digest())``
    — calling ``.digest()`` on the encoded bytes instead of on the md5 object
    — which raised AttributeError on every key long enough to need a second
    chunk (MPPE key plaintexts are 32 octets, so always).
    """
    chunk_count = len(plain_text) // 16
    # b(1) = MD5(S + R + A)
    b = hashlib.new(
        "md5", secret + request_authenticator + salt.encode(errors='ignore')
    ).digest()
    c = xor(plain_text[:16], b)
    result = c
    for x in range(1, chunk_count):
        # b(n) = MD5(S + c(n-1)).
        # NOTE(review): ``c.encode()`` is UTF-8, so chars >= 0x80 expand to
        # two bytes — this matches the existing salt handling above but
        # deviates from the octet-wise MD5 of RFC 2548; confirm against the
        # peer implementation before changing.
        b = hashlib.new("md5", secret + c.encode()).digest()
        c = xor(plain_text[x * 16:(x + 1) * 16], b)
        result += c
    return result
def xor(str1, str2):
    """XOR a text string with a bytes string, returning text.

    ``str2`` arrives as bytes and is decoded first; the result is truncated
    to the shorter of the two inputs (``zip`` semantics, matching ``map``
    over two sequences).
    """
    decoded = str2.decode(errors='ignore')
    return ''.join(chr(ord(a) ^ ord(b)) for a, b in zip(str1, decoded))

131
py/libs/mschap3/mschap.py Normal file
View file

@ -0,0 +1,131 @@
#!/usr/bin/env python3
# coding=utf-8
from . import des
from . import md4
import hashlib
from . import utils
from Crypto.Hash import MD4
def challenge_hash(peer_challenge, authenticator_challenge, username):
    """ChallengeHash: first 8 octets of
    SHA-1(PeerChallenge | AuthenticatorChallenge | UserName)."""
    digest = hashlib.sha1(
        peer_challenge + authenticator_challenge + username
    ).digest()
    return digest[:8]
def nt_password_hash(passwd):
    """NtPasswordHash: MD4 over the "dumb unicode" (NUL-interleaved)
    expansion of the password."""
    unicode_pw = utils.str2unicode(passwd)
    ctx = md4.new()
    ctx.update(unicode_pw)
    return ctx.digest()
def hash_nt_password_hash(password_hash):
    """HashNtPasswordHash: MD4 over the 16-octet NT password hash."""
    ctx = md4.new()
    ctx.update(password_hash)
    return ctx.digest()
def generate_nt_response_mschap(challenge, password):
    """NT-Response for MS-CHAP-v1: DES-encrypt the challenge under the NT
    password hash."""
    return challenge_response(challenge, nt_password_hash(password))
def generate_nt_response_mschap2(authenticator_challenge, peer_challenge, username, password, nthash=False):
    """GenerateNTResponse for MS-CHAP-v2.

    The challenge is derived from both peer and authenticator challenges
    plus the username; the password hash comes either from ``nthash`` (hex
    NT hash, mapped byte-for-byte onto chars) or from the clear password.
    """
    challenge = challenge_hash(peer_challenge, authenticator_challenge, username)
    if nthash:
        password_hash = bytes.fromhex(nthash).decode('iso8859-1', errors='ignore')
    else:
        password_hash = nt_password_hash(password)
    return challenge_response(challenge, password_hash)
def challenge_response(challenge, password_hash):
    """ChallengeResponse: pad the password hash to 21 octets with NULs and
    DES-encrypt the challenge under each 7-octet third, concatenating the
    three 8-byte ciphertexts."""
    padded = [ord(ch) for ch in password_hash.ljust(21, '\0')]
    response = b""
    for start in (0, 7, 14):
        response += des.DES(padded[start:start + 7]).encrypt(challenge)
    return response
def generate_authenticator_response(password, nt_response, peer_challenge, authenticator_challenge, username, nthash=False):
    """GenerateAuthenticatorResponse: build the MS-CHAP-v2 success string
    returned to the peer, ``"\\x01S=" + 40 uppercase hex digits``."""
    Magic1 = b"\x4D\x61\x67\x69\x63\x20\x73\x65\x72\x76\x65\x72\x20\x74\x6F\x20\x63\x6C\x69\x65\x6E\x74\x20\x73\x69\x67\x6E\x69\x6E\x67\x20\x63\x6F\x6E\x73\x74\x61\x6E\x74"
    Magic2 = b"\x50\x61\x64\x20\x74\x6F\x20\x6D\x61\x6B\x65\x20\x69\x74\x20\x64\x6F\x20\x6D\x6F\x72\x65\x20\x74\x68\x61\x6E\x20\x6F\x6E\x65\x20\x69\x74\x65\x72\x61\x74\x69\x6F\x6E"
    if nthash:
        # Hex NT hash supplied directly; map bytes 1:1 onto characters.
        password_hash = bytes.fromhex(nthash).decode('iso8859-1', errors='ignore')
    else:
        password_hash = nt_password_hash(password)
    password_hash_hash = hash_nt_password_hash(password_hash)
    # First digest: SHA-1(PasswordHashHash | NT-Response | Magic1).
    # (The original looped over a one-element encoding list ['iso8859-1'];
    # flattened here — behaviour is identical.)
    sha = hashlib.sha1()
    sha.update(password_hash_hash.encode('iso8859-1', errors='ignore'))
    sha.update(nt_response)
    sha.update(Magic1)
    digest = sha.digest()
    challenge = challenge_hash(peer_challenge, authenticator_challenge, username)
    # Second digest: SHA-1(Digest | Challenge | Magic2).
    sha = hashlib.sha1()
    sha.update(digest)
    sha.update(challenge)
    sha.update(Magic2)
    digest = sha.digest()
    return "\x01S=" + convert_to_hex_string(digest)
def check_authenticator_response(password, nt_response, peer_challenge, authenticator_challenge, user_name, received_response):
    """CheckAuthenticatorResponse: recompute the expected authenticator
    response and compare it with the one received."""
    expected = generate_authenticator_response(
        password, nt_response, peer_challenge, authenticator_challenge, user_name
    )
    return expected == received_response
def convert_to_hex_string(string):
    """Return the uppercase hex representation of a bytes value."""
    # The original decoded via iso8859-1 (byte value == code point) and
    # hex-dumped each character; that is exactly bytes.hex(), uppercased.
    return string.hex().upper()
def lm_password_hash(password):
    """LmPasswordHash: uppercase the password, truncate/NUL-pad to 14
    characters, and DES-hash each 7-character half."""
    padded = password.upper()[:14].ljust(14, "\0")
    return des_hash(padded[:7]) + des_hash(padded[7:])
def des_hash(clear):
    """DesEncrypt: DES-encrypt the constant "KGS!@#$%" under key ``clear``."""
    return des.DES(clear).encrypt(r"KGS!@#$%")

587
py/libs/mschap3/ntlm.py Normal file
View file

@ -0,0 +1,587 @@
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.
import struct
import base64
import hashlib
import hmac
import random
import re
import binascii
from socket import gethostname
from . import des
# NegotiateFlags bit values (NTLM negotiate/challenge/authenticate messages).
NTLM_NegotiateUnicode = 0x00000001
NTLM_NegotiateOEM = 0x00000002
NTLM_RequestTarget = 0x00000004
NTLM_Unknown9 = 0x00000008
NTLM_NegotiateSign = 0x00000010
NTLM_NegotiateSeal = 0x00000020
NTLM_NegotiateDatagram = 0x00000040
NTLM_NegotiateLanManagerKey = 0x00000080
NTLM_Unknown8 = 0x00000100
NTLM_NegotiateNTLM = 0x00000200
NTLM_NegotiateNTOnly = 0x00000400
NTLM_Anonymous = 0x00000800
NTLM_NegotiateOemDomainSupplied = 0x00001000
NTLM_NegotiateOemWorkstationSupplied = 0x00002000
NTLM_Unknown6 = 0x00004000
NTLM_NegotiateAlwaysSign = 0x00008000
NTLM_TargettypeDomain = 0x00010000
NTLM_TargettypeServer = 0x00020000
NTLM_TargettypeShare = 0x00040000
NTLM_NegotiateExtendedSecurity = 0x00080000
NTLM_NegotiateIdentify = 0x00100000
NTLM_Unknown5 = 0x00200000
NTLM_RequestNonNTSessionKey = 0x00400000
NTLM_NegotiateTargetInfo = 0x00800000
NTLM_Unknown4 = 0x01000000
NTLM_NegotiateVersion = 0x02000000
NTLM_Unknown3 = 0x04000000
NTLM_Unknown2 = 0x08000000
NTLM_Unknown1 = 0x10000000
NTLM_Negotiate128 = 0x20000000
NTLM_NegotiateKeyExchange = 0x40000000
NTLM_Negotiate56 = 0x80000000
# we send these flags with our type 1 message
NTLM_TYPE1_FLAGS = (
    NTLM_NegotiateUnicode
    | NTLM_NegotiateOEM
    | NTLM_RequestTarget
    | NTLM_NegotiateNTLM
    | NTLM_NegotiateOemDomainSupplied
    | NTLM_NegotiateOemWorkstationSupplied
    | NTLM_NegotiateAlwaysSign
    | NTLM_NegotiateExtendedSecurity
    | NTLM_NegotiateVersion
    | NTLM_Negotiate128
    | NTLM_Negotiate56
)
# flags expected back in (and packed into) the type 3 message
NTLM_TYPE2_FLAGS = (
    NTLM_NegotiateUnicode
    | NTLM_RequestTarget
    | NTLM_NegotiateNTLM
    | NTLM_NegotiateAlwaysSign
    | NTLM_NegotiateExtendedSecurity
    | NTLM_NegotiateTargetInfo
    | NTLM_NegotiateVersion
    | NTLM_Negotiate128
    | NTLM_Negotiate56
)
# AV_PAIR AvId values found in the type-2 TargetInfo list.
NTLM_MsvAvEOL = 0  # Indicates that this is the last AV_PAIR in the list. AvLen MUST be 0. This type of information MUST be present in the AV pair list.
NTLM_MsvAvNbComputerName = 1  # The server's NetBIOS computer name. The name MUST be in Unicode, and is not null-terminated. This type of information MUST be present in the AV_pair list.
NTLM_MsvAvNbDomainName = 2  # The server's NetBIOS domain name. The name MUST be in Unicode, and is not null-terminated. This type of information MUST be present in the AV_pair list.
NTLM_MsvAvDnsComputerName = 3  # The server's Active Directory DNS computer name. The name MUST be in Unicode, and is not null-terminated.
NTLM_MsvAvDnsDomainName = 4  # The server's Active Directory DNS domain name. The name MUST be in Unicode, and is not null-terminated.
NTLM_MsvAvDnsTreeName = 5  # The server's Active Directory (AD) DNS forest tree name. The name MUST be in Unicode, and is not null-terminated.
NTLM_MsvAvFlags = 6  # A field containing a 32-bit value indicating server or client configuration. 0x00000001: indicates to the client that the account authentication is constrained. 0x00000002: indicates that the client is providing message integrity in the MIC field (section 2.2.1.3) in the AUTHENTICATE_MESSAGE.
NTLM_MsvAvTimestamp = 7  # A FILETIME structure ([MS-DTYP] section 2.3.1) in little-endian byte order that contains the server local time.<12>
NTLM_MsAvRestrictions = 8  # A Restriction_Encoding structure (section 2.2.2.2). The Value field contains a structure representing the integrity level of the security principal, as well as a MachineID created at computer startup to identify the calling machine. <13>
"""
utility functions for Microsoft NTLM authentication
References:
[MS-NLMP]: NT LAN Manager (NTLM) Authentication Protocol Specification
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-NLMP%5D.pdf
[MS-NTHT]: NTLM Over HTTP Protocol Specification
http://download.microsoft.com/download/a/e/6/ae6e4142-aa58-45c6-8dcf-a657e5900cd3/%5BMS-NTHT%5D.pdf
Cntlm Authentication Proxy
http://cntlm.awk.cz/
NTLM Authorization Proxy Server
http://sourceforge.net/projects/ntlmaps/
Optimized Attack for NTLM2 Session Response
http://www.blackhat.com/presentations/bh-asia-04/bh-jp-04-pdfs/bh-jp-04-seki.pdf
"""
def dump_NegotiateFlags(NegotiateFlags):
    """Print the name of every NTLM negotiate flag set in ``NegotiateFlags``,
    one per line, in bit order (same order and text as the original if-chain)."""
    named_flags = (
        (NTLM_NegotiateUnicode, "NTLM_NegotiateUnicode"),
        (NTLM_NegotiateOEM, "NTLM_NegotiateOEM"),
        (NTLM_RequestTarget, "NTLM_RequestTarget"),
        (NTLM_Unknown9, "NTLM_Unknown9"),
        (NTLM_NegotiateSign, "NTLM_NegotiateSign"),
        (NTLM_NegotiateSeal, "NTLM_NegotiateSeal"),
        (NTLM_NegotiateDatagram, "NTLM_NegotiateDatagram"),
        (NTLM_NegotiateLanManagerKey, "NTLM_NegotiateLanManagerKey"),
        (NTLM_Unknown8, "NTLM_Unknown8"),
        (NTLM_NegotiateNTLM, "NTLM_NegotiateNTLM"),
        (NTLM_NegotiateNTOnly, "NTLM_NegotiateNTOnly"),
        (NTLM_Anonymous, "NTLM_Anonymous"),
        (NTLM_NegotiateOemDomainSupplied, "NTLM_NegotiateOemDomainSupplied"),
        (NTLM_NegotiateOemWorkstationSupplied, "NTLM_NegotiateOemWorkstationSupplied"),
        (NTLM_Unknown6, "NTLM_Unknown6"),
        (NTLM_NegotiateAlwaysSign, "NTLM_NegotiateAlwaysSign"),
        (NTLM_TargettypeDomain, "NTLM_TargettypeDomain"),
        (NTLM_TargettypeServer, "NTLM_TargettypeServer"),
        (NTLM_TargettypeShare, "NTLM_TargettypeShare"),
        (NTLM_NegotiateExtendedSecurity, "NTLM_NegotiateExtendedSecurity"),
        (NTLM_NegotiateIdentify, "NTLM_NegotiateIdentify"),
        (NTLM_Unknown5, "NTLM_Unknown5"),
        (NTLM_RequestNonNTSessionKey, "NTLM_RequestNonNTSessionKey"),
        (NTLM_NegotiateTargetInfo, "NTLM_NegotiateTargetInfo"),
        (NTLM_Unknown4, "NTLM_Unknown4"),
        (NTLM_NegotiateVersion, "NTLM_NegotiateVersion"),
        (NTLM_Unknown3, "NTLM_Unknown3"),
        (NTLM_Unknown2, "NTLM_Unknown2"),
        (NTLM_Unknown1, "NTLM_Unknown1"),
        (NTLM_Negotiate128, "NTLM_Negotiate128"),
        (NTLM_NegotiateKeyExchange, "NTLM_NegotiateKeyExchange"),
        (NTLM_Negotiate56, "NTLM_Negotiate56"),
    )
    for mask, name in named_flags:
        if NegotiateFlags & mask:
            print(name + " set")
def create_NTLM_NEGOTIATE_MESSAGE(user, type1_flags=NTLM_TYPE1_FLAGS):
    """Build the base64-encoded NTLM type-1 (negotiate) message.

    ``user`` may be "DOMAIN\\user"; only the part before the backslash (the
    domain) is sent, along with this host's uppercased workstation name.
    Returns the message as a base64 str.
    """
    BODY_LENGTH = 40
    Payload_start = BODY_LENGTH  # in bytes
    protocol = b"NTLMSSP\0"  # name
    type = struct.pack("<I", 1)  # type 1
    flags = struct.pack("<I", type1_flags)
    Workstation = bytes(gethostname().upper(), "ascii")
    user_parts = user.split("\\", 1)
    DomainName = bytes(user_parts[0].upper(), "ascii")
    EncryptedRandomSessionKey = ""  # unused in a type-1 message
    # Payload layout: Workstation bytes first, then DomainName bytes — the
    # offsets below are computed in that order.
    WorkstationLen = struct.pack("<H", len(Workstation))
    WorkstationMaxLen = struct.pack("<H", len(Workstation))
    WorkstationBufferOffset = struct.pack("<I", Payload_start)
    Payload_start += len(Workstation)
    DomainNameLen = struct.pack("<H", len(DomainName))
    DomainNameMaxLen = struct.pack("<H", len(DomainName))
    DomainNameBufferOffset = struct.pack("<I", Payload_start)
    Payload_start += len(DomainName)
    # Version field: advertises 5.1 build 2600, NTLM revision 15.
    ProductMajorVersion = struct.pack("<B", 5)
    ProductMinorVersion = struct.pack("<B", 1)
    ProductBuild = struct.pack("<H", 2600)
    VersionReserved1 = struct.pack("<B", 0)
    VersionReserved2 = struct.pack("<B", 0)
    VersionReserved3 = struct.pack("<B", 0)
    NTLMRevisionCurrent = struct.pack("<B", 15)
    msg1 = (
        protocol
        + type
        + flags
        + DomainNameLen
        + DomainNameMaxLen
        + DomainNameBufferOffset
        + WorkstationLen
        + WorkstationMaxLen
        + WorkstationBufferOffset
        + ProductMajorVersion
        + ProductMinorVersion
        + ProductBuild
        + VersionReserved1
        + VersionReserved2
        + VersionReserved3
        + NTLMRevisionCurrent
    )
    # Sanity check: the fixed header must be exactly BODY_LENGTH octets.
    assert BODY_LENGTH == len(msg1), "BODY_LENGTH: %d != msg1: %d" % (
        BODY_LENGTH,
        len(msg1),
    )
    msg1 += Workstation + DomainName
    msg1 = base64.b64encode(msg1)
    return msg1.decode()
def parse_NTLM_CHALLENGE_MESSAGE(msg2):
    """Parse a base64-encoded NTLM type-2 (challenge) message.

    Returns ``(ServerChallenge, NegotiateFlags)``: the 8-octet challenge and
    the 32-bit flag word. Other parsed fields (target name, AV pairs) are
    decoded but not returned.
    """
    msg2 = base64.b64decode(bytes(msg2, "ascii"))
    Signature = msg2[0:8]
    msg_type = struct.unpack("<I", msg2[8:12])[0]
    assert msg_type == 2  # must be a challenge message
    TargetNameLen = struct.unpack("<H", msg2[12:14])[0]
    TargetNameMaxLen = struct.unpack("<H", msg2[14:16])[0]
    TargetNameOffset = struct.unpack("<I", msg2[16:20])[0]
    TargetName = msg2[TargetNameOffset : TargetNameOffset + TargetNameMaxLen]
    NegotiateFlags = struct.unpack("<I", msg2[20:24])[0]
    ServerChallenge = msg2[24:32]
    if NegotiateFlags & NTLM_NegotiateTargetInfo:
        # TargetInfo is a list of AV_PAIRs: <H AvId><H AvLen><AvLen bytes>.
        # Only the server timestamp is currently extracted (and then unused).
        Reserved = msg2[32:40]
        TargetInfoLen = struct.unpack("<H", msg2[40:42])[0]
        TargetInfoMaxLen = struct.unpack("<H", msg2[42:44])[0]
        TargetInfoOffset = struct.unpack("<I", msg2[44:48])[0]
        TargetInfo = msg2[TargetInfoOffset : TargetInfoOffset + TargetInfoLen]
        i = 0
        TimeStamp = "\0" * 8
        while i < TargetInfoLen:
            AvId = struct.unpack("<H", TargetInfo[i : i + 2])[0]
            AvLen = struct.unpack("<H", TargetInfo[i + 2 : i + 4])[0]
            AvValue = TargetInfo[i + 4 : i + 4 + AvLen]
            i = i + 4 + AvLen
            if AvId == NTLM_MsvAvTimestamp:
                TimeStamp = AvValue
            # ~ print AvId, AvValue.decode('utf-16')
    return (ServerChallenge, NegotiateFlags)
def create_NTLM_AUTHENTICATE_MESSAGE(nonce, user, domain, password, NegotiateFlags):
    """Build the base64-encoded NTLM type-3 (authenticate) message.

    ``nonce`` is the 8-octet server challenge from the type-2 message;
    ``NegotiateFlags`` is the flag word the server returned (it selects
    Unicode string encoding and NTLM2 session security).
    Returns the message as a base64 str.
    """
    is_unicode = NegotiateFlags & NTLM_NegotiateUnicode
    is_NegotiateExtendedSecurity = NegotiateFlags & NTLM_NegotiateExtendedSecurity
    flags = struct.pack("<I", NTLM_TYPE2_FLAGS)
    BODY_LENGTH = 72
    Payload_start = BODY_LENGTH  # in bytes
    Workstation = bytes(gethostname().upper(), "ascii")
    DomainName = bytes(domain.upper(), "ascii")
    UserName = bytes(user, "ascii")
    EncryptedRandomSessionKey = b""
    if is_unicode:
        # Server negotiated Unicode: all string fields switch to UTF-16LE.
        Workstation = bytes(gethostname().upper(), "utf-16-le")
        DomainName = bytes(domain.upper(), "utf-16-le")
        UserName = bytes(user, "utf-16-le")
        EncryptedRandomSessionKey = bytes("", "utf-16-le")
    LmChallengeResponse = calc_resp(create_LM_hashed_password_v1(password), nonce)
    NtChallengeResponse = calc_resp(create_NT_hashed_password_v1(password), nonce)
    if is_NegotiateExtendedSecurity:
        # NTLM2 session security: a random 8-octet client challenge replaces
        # the plain v1 responses computed above.
        pwhash = create_NT_hashed_password_v1(password, UserName, DomainName)
        ClientChallenge = b""
        for i in range(8):
            ClientChallenge += bytes((random.getrandbits(8),))
        (NtChallengeResponse, LmChallengeResponse) = ntlm2sr_calc_resp(
            pwhash, nonce, ClientChallenge
        )  # ='\x39 e3 f4 cd 59 c5 d8 60')
    Signature = b"NTLMSSP\0"
    Messagetype = struct.pack("<I", 3)  # type 3
    # Payload offsets are accumulated in the order the payload is appended
    # below: DomainName, UserName, Workstation, LM, NT, session key.
    DomainNameLen = struct.pack("<H", len(DomainName))
    DomainNameMaxLen = struct.pack("<H", len(DomainName))
    DomainNameOffset = struct.pack("<I", Payload_start)
    Payload_start += len(DomainName)
    UserNameLen = struct.pack("<H", len(UserName))
    UserNameMaxLen = struct.pack("<H", len(UserName))
    UserNameOffset = struct.pack("<I", Payload_start)
    Payload_start += len(UserName)
    WorkstationLen = struct.pack("<H", len(Workstation))
    WorkstationMaxLen = struct.pack("<H", len(Workstation))
    WorkstationOffset = struct.pack("<I", Payload_start)
    Payload_start += len(Workstation)
    LmChallengeResponseLen = struct.pack("<H", len(LmChallengeResponse))
    LmChallengeResponseMaxLen = struct.pack("<H", len(LmChallengeResponse))
    LmChallengeResponseOffset = struct.pack("<I", Payload_start)
    Payload_start += len(LmChallengeResponse)
    NtChallengeResponseLen = struct.pack("<H", len(NtChallengeResponse))
    NtChallengeResponseMaxLen = struct.pack("<H", len(NtChallengeResponse))
    NtChallengeResponseOffset = struct.pack("<I", Payload_start)
    Payload_start += len(NtChallengeResponse)
    EncryptedRandomSessionKeyLen = struct.pack("<H", len(EncryptedRandomSessionKey))
    EncryptedRandomSessionKeyMaxLen = struct.pack("<H", len(EncryptedRandomSessionKey))
    EncryptedRandomSessionKeyOffset = struct.pack("<I", Payload_start)
    Payload_start += len(EncryptedRandomSessionKey)
    NegotiateFlags = flags
    # Version field: advertises 5.1 build 2600, NTLM revision 15.
    ProductMajorVersion = struct.pack("<B", 5)
    ProductMinorVersion = struct.pack("<B", 1)
    ProductBuild = struct.pack("<H", 2600)
    VersionReserved1 = struct.pack("<B", 0)
    VersionReserved2 = struct.pack("<B", 0)
    VersionReserved3 = struct.pack("<B", 0)
    NTLMRevisionCurrent = struct.pack("<B", 15)
    MIC = struct.pack("<IIII", 0, 0, 0, 0)  # NOTE(review): built but never appended to msg3
    msg3 = (
        Signature
        + Messagetype
        + LmChallengeResponseLen
        + LmChallengeResponseMaxLen
        + LmChallengeResponseOffset
        + NtChallengeResponseLen
        + NtChallengeResponseMaxLen
        + NtChallengeResponseOffset
        + DomainNameLen
        + DomainNameMaxLen
        + DomainNameOffset
        + UserNameLen
        + UserNameMaxLen
        + UserNameOffset
        + WorkstationLen
        + WorkstationMaxLen
        + WorkstationOffset
        + EncryptedRandomSessionKeyLen
        + EncryptedRandomSessionKeyMaxLen
        + EncryptedRandomSessionKeyOffset
        + NegotiateFlags
        + ProductMajorVersion
        + ProductMinorVersion
        + ProductBuild
        + VersionReserved1
        + VersionReserved2
        + VersionReserved3
        + NTLMRevisionCurrent
    )
    # Sanity check: the fixed header must be exactly BODY_LENGTH octets.
    assert BODY_LENGTH == len(msg3), "BODY_LENGTH: %d != msg3: %d" % (
        BODY_LENGTH,
        len(msg3),
    )
    Payload = (
        DomainName
        + UserName
        + Workstation
        + LmChallengeResponse
        + NtChallengeResponse
        + EncryptedRandomSessionKey
    )
    msg3 += Payload
    msg3 = base64.b64encode(msg3)
    return msg3.decode()
def calc_resp(password_hash, server_challenge):
    """Produce the 24-byte LM/NT v1 response.

    @param password_hash
        16-byte password hash (zero-padded here to 21 bytes)
    @param server_challenge
        8-byte challenge from the type-2 message
    returns
        24 bytes: the challenge DES-encrypted under each 7-byte third of
        the padded hash, concatenated.
    """
    padded = password_hash + b"\0" * (21 - len(password_hash))
    thirds = (padded[0:7], padded[7:14], padded[14:21])
    return b"".join(
        des.DES(chunk).encrypt(server_challenge[0:8]) for chunk in thirds
    )
def ComputeResponse(
    ResponseKeyNT,
    ResponseKeyLM,
    ServerChallenge,
    ServerName,
    ClientChallenge=b"\xaa" * 8,
    Time=b"\0" * 8,
):
    """NTLMv2 ComputeResponse: return (NtChallengeResponse, LmChallengeResponse).

    LMv2 = HMAC-MD5(K_LM, ServerChallenge | ClientChallenge) | ClientChallenge.
    NTv2 = HMAC-MD5(K_NT, ServerChallenge | temp) | temp, where temp packs the
    response versions, Time and ClientChallenge.

    Fixes:
    - ``hmac.new`` was called without ``digestmod``; Python 3.8+ raises
      TypeError without it. NTLMv2 uses HMAC-MD5, now passed explicitly.
    - The default ``ClientChallenge``/``Time`` were str; concatenating them
      with the bytes ``ServerChallenge`` raised TypeError. Now bytes.
    NOTE(review): ``temp`` here ends with ServerChallenge rather than the
    ServerName AV-pair blob; kept as the original had it — confirm against
    the peer before changing.
    """
    LmChallengeResponse = (
        hmac.new(ResponseKeyLM, ServerChallenge + ClientChallenge, hashlib.md5).digest()
        + ClientChallenge
    )
    Responserversion = b"\x01"
    HiResponserversion = b"\x01"
    temp = (
        Responserversion
        + HiResponserversion
        + b"\0" * 6
        + Time
        + ClientChallenge
        + b"\0" * 4
        + ServerChallenge
        + b"\0" * 4
    )
    NTProofStr = hmac.new(ResponseKeyNT, ServerChallenge + temp, hashlib.md5).digest()
    NtChallengeResponse = NTProofStr + temp
    # Session base key is derived but not returned by this implementation.
    SessionBaseKey = hmac.new(ResponseKeyNT, NTProofStr, hashlib.md5).digest()
    return (NtChallengeResponse, LmChallengeResponse)
def ntlm2sr_calc_resp(ResponseKeyNT, ServerChallenge, ClientChallenge=b"\xaa" * 8):
    """NTLM2 session response.

    The LM field carries the client challenge zero-padded to 24 bytes; the
    NT response is the classic calc_resp over the first 8 bytes of
    MD5(ServerChallenge | ClientChallenge).
    """
    # (hashlib is already imported at module level; the original re-imported
    # it locally.)
    LmChallengeResponse = ClientChallenge + b"\0" * 16
    session_hash = hashlib.md5(ServerChallenge + ClientChallenge).digest()
    NtChallengeResponse = calc_resp(ResponseKeyNT, session_hash[:8])
    return (NtChallengeResponse, LmChallengeResponse)
def create_LM_hashed_password_v1(passwd):
    """Create the LanManager hashed password.

    If ``passwd`` is already an "LM:NT" pair of 32-hex-digit hashes, the LM
    half is returned directly. Otherwise the password is uppercased,
    NUL-padded to exactly 14 bytes, and each 7-byte half is used as a DES
    key to encrypt the constant "KGS!@#$%".

    Fix: the original computed the NUL padding but then rebuilt ``lm_pw``
    from the *unpadded* password, so passwords shorter than 14 characters
    handed short key material to des.DES.
    """
    # if the passwd provided is already a hash, we just return the first half
    if re.match(r"^[\w]{32}:[\w]{32}$", passwd):
        return binascii.unhexlify(passwd.split(":")[0])
    # fix the password length to exactly 14 bytes (truncate or NUL-pad)
    passwd = passwd.upper()
    lm_pw = passwd + "\0" * (14 - len(passwd))
    lm_pw = bytes(lm_pw[0:14], "utf8")
    # do hash
    magic_str = b"KGS!@#$%"  # page 57 in [MS-NLMP]
    res = b""
    dobj = des.DES(lm_pw[0:7])
    res = res + dobj.encrypt(magic_str)
    dobj = des.DES(lm_pw[7:14])
    res = res + dobj.encrypt(magic_str)
    return res
def create_NT_hashed_password_v1(passwd, user=None, domain=None):
    """Create the NT hashed password: MD4 over the UTF-16LE password.

    A ``passwd`` already in "LM:NT" hex-pair form short-circuits to its NT
    half. ``user`` and ``domain`` are accepted for signature compatibility
    but unused in the v1 hash.
    """
    # if the passwd provided is already a hash, we just return the second half
    if re.match(r"^[\w]{32}:[\w]{32}$", passwd):
        return binascii.unhexlify(passwd.split(":")[1])
    return hashlib.new("md4", passwd.encode("utf-16le")).digest()
def create_NT_hashed_password_v2(passwd, user, domain):
    """Create the NTLMv2 hashed password: HMAC-MD5 of
    ``UPPER(user) + domain`` in UTF-16LE, keyed with the v1 NT hash.

    Fixes:
    - ``hmac.new`` now receives ``digestmod`` explicitly (required since
      Python 3.8; NTLMv2 uses HMAC-MD5).
    - Removed the unreachable ``return digest`` after the real return.
    """
    digest = create_NT_hashed_password_v1(passwd)
    return hmac.new(
        digest, (user.upper() + domain).encode("utf-16le"), hashlib.md5
    ).digest()
def create_sessionbasekey(password):
    """NTLMv1 session base key: MD4 of the NT password hash."""
    nt_hash = create_NT_hashed_password_v1(password)
    return hashlib.new("md4", nt_hash).digest()
if __name__ == "__main__":
    # Self-test: checks the hash and response functions against the worked
    # examples in [MS-NLMP] pages 72-77.
    from binascii import unhexlify, hexlify

    def ByteToHex(byteStr):
        """
        Convert a byte string to it's hex string representation e.g. for output.
        """
        return " ".join(["%02X" % x for x in byteStr])

    def HexToByte(hexStr):
        """
        Convert a string hex byte values into a byte string. The Hex Byte values may
        or may not be space separated.
        """
        hexStr = "".join(hexStr.split(" "))
        return unhexlify(hexStr)

    # Fixed test vectors from the specification.
    ServerChallenge = HexToByte("01 23 45 67 89 ab cd ef")
    ClientChallenge = b"\xaa" * 8
    Time = b"\x00" * 8
    Workstation = "COMPUTER".encode("utf-16-le")
    ServerName = "Server".encode("utf-16-le")
    User = "User"
    Domain = "Domain"
    Password = "Password"
    RandomSessionKey = "\55" * 16  # defined by the spec's example; unused below
    assert HexToByte(
        "e5 2c ac 67 41 9a 9a 22 4a 3b 10 8f 3f a6 cb 6d"
    ) == create_LM_hashed_password_v1(
        Password
    )  # [MS-NLMP] page 72
    assert HexToByte(
        "a4 f4 9c 40 65 10 bd ca b6 82 4e e7 c3 0f d8 52"
    ) == create_NT_hashed_password_v1(
        Password
    )  # [MS-NLMP] page 73
    assert HexToByte(
        "d8 72 62 b0 cd e4 b1 cb 74 99 be cc cd f1 07 84"
    ) == create_sessionbasekey(Password)
    assert HexToByte(
        "67 c4 30 11 f3 02 98 a2 ad 35 ec e6 4f 16 33 1c 44 bd be d9 27 84 1f 94"
    ) == calc_resp(create_NT_hashed_password_v1(Password), ServerChallenge)
    assert HexToByte(
        "98 de f7 b8 7f 88 aa 5d af e2 df 77 96 88 a1 72 de f1 1c 7d 5c cd ef 13"
    ) == calc_resp(create_LM_hashed_password_v1(Password), ServerChallenge)
    (NTLMv1Response, LMv1Response) = ntlm2sr_calc_resp(
        create_NT_hashed_password_v1(Password), ServerChallenge, ClientChallenge
    )
    assert (
        HexToByte(
            "aa aa aa aa aa aa aa aa 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00"
        )
        == LMv1Response
    )  # [MS-NLMP] page 75
    assert (
        HexToByte(
            "75 37 f8 03 ae 36 71 28 ca 45 82 04 bd e7 ca f8 1e 97 ed 26 83 26 72 32"
        )
        == NTLMv1Response
    )
    assert HexToByte(
        "0c 86 8a 40 3b fd 7a 93 a3 00 1e f2 2e f0 2e 3f"
    ) == create_NT_hashed_password_v2(
        Password, User, Domain
    )  # [MS-NLMP] page 76
    ResponseKeyLM = ResponseKeyNT = create_NT_hashed_password_v2(Password, User, Domain)
    (NTLMv2Response, LMv2Response) = ComputeResponse(
        ResponseKeyNT, ResponseKeyLM, ServerChallenge, ServerName, ClientChallenge, Time
    )
    assert (
        HexToByte(
            "86 c3 50 97 ac 9c ec 10 25 54 76 4a 57 cc cc 19 aa aa aa aa aa aa aa aa"
        )
        == LMv2Response
    )  # [MS-NLMP] page 76
    assert (
        "TlRMTVNTUAABAAAAB7IIogYABgAwAAAACAAIACgAAAAFASgKAAAAD1dTMDQyMzc4RE9NQUlO"
        == create_NTLM_NEGOTIATE_MESSAGE("DOMAIN\\User")
    )
    # expected failure
    # According to the spec in section '3.3.2 NTLM v2 Authentication' the NTLMv2Response should be longer than the value given on page 77 (this suggests a mistake in the spec)
    # ~ assert HexToByte("68 cd 0a b8 51 e5 1c 96 aa bc 92 7b eb ef 6a 1c") == NTLMv2Response, "\nExpected: 68 cd 0a b8 51 e5 1c 96 aa bc 92 7b eb ef 6a 1c\nActual: %s" % ByteToHex(NTLMv2Response) # [MS-NLMP] page 77

125
py/libs/mschap3/utils.py Executable file
View file

@ -0,0 +1,125 @@
# This file is part of 'NTLM Authorization Proxy Server'
# Copyright 2001 Dmitry A. Rozmanov <dima@xenon.spb.ru>
#
# NTLM APS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# NTLM APS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the sofware; see the file COPYING. If not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#
import string
# Uppercase hexadecimal digit lookup table used by the str/hex helpers below.
hd = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F',]
#--------------------------------------------------------------------------------------------
def str2hex_num(str):
    """Pack the character ordinals of ``str`` big-endian into one integer
    and return its ``hex()`` string (e.g. "\\x01\\x02" -> "0x102")."""
    acc = 0
    for ch in str:
        acc = (acc << 8) + ord(ch)
    return hex(acc)
#--------------------------------------------------------------------------------------------
def str2hex(str, delimiter=''):
    """Return the uppercase hex dump of each character of ``str``, with
    ``delimiter`` appended after every byte.

    Fix: the original used Python 2 ``/`` for the nibble split; under
    Python 3 ``ord(i) / 16`` is a float and indexing ``hd`` with it raises
    TypeError. Integer ``//`` and ``%`` restore the Python 2 behaviour.
    """
    res = ''
    for i in str:
        res = res + hd[ord(i) // 16]   # high nibble
        res = res + hd[ord(i) % 16]    # low nibble
        res = res + delimiter
    return res
#--------------------------------------------------------------------------------------------
def str2dec(str, delimiter=''):
    """Return each character ordinal as a width-3 decimal field, each
    followed by ``delimiter``."""
    return ''.join('%3d' % ord(ch) + delimiter for ch in str)
#--------------------------------------------------------------------------------------------
def hex2str(hex_str):
    """Inverse of str2hex for uppercase hex input: decode each pair of hex
    digits back into a character."""
    chars = []
    for pos in range(0, len(hex_str), 2):
        high = hd.index(hex_str[pos])
        low = hd.index(hex_str[pos + 1])
        chars.append(chr(high * 16 + low))
    return ''.join(chars)
#--------------------------------------------------------------------------------------------
def str2prn_str(bin_str, delimiter=''):
    """Printable dump: keep characters above 0x1F, replace control
    characters with '.', appending ``delimiter`` after each."""
    out = ''
    for ch in bin_str:
        out += (ch if ord(ch) > 31 else '.') + delimiter
    return out
#--------------------------------------------------------------------------------------------
def byte2bin_str(char):
    """Return the zero-padded 8-bit binary string for a single character.

    Fix: the original used Python 2 ``t / 2``; under Python 3 that is float
    division, so the remainder test and loop termination break. Integer
    ``//`` restores the intended bit extraction.
    """
    res = ''
    t = ord(char)
    while t > 0:
        t1 = t // 2
        # Odd means the current lowest bit is 1.
        if t != 2 * t1:
            res = '1' + res
        else:
            res = '0' + res
        t = t1
    # Left-pad to a full octet.
    if len(res) < 8:
        res = '0' * (8 - len(res)) + res
    return res
#--------------------------------------------------------------------------------------------
def str2lst(str):
    """Return the character codes of *str* as a list of ints."""
    return [ord(ch) for ch in str]
#--------------------------------------------------------------------------------------------
def lst2str(lst):
    """Build a string from a list of ints, keeping only the low byte of
    each value."""
    return ''.join(chr(v & 0xFF) for v in lst)
#--------------------------------------------------------------------------------------------
def int2chrs(number_int):
    """Encode the low 16 bits of *number_int* as two characters,
    little-endian (low byte first)."""
    low = number_int & 0xFF
    high = (number_int >> 8) & 0xFF
    return chr(low) + chr(high)
#--------------------------------------------------------------------------------------------
def bytes2int(bytes):
    """Decode two little-endian bytes into an int (inverse of int2chrs).

    Generalized: accepts either a str of two characters (original Py2
    behavior, elements need ord()) or a Py3 bytes object, whose elements
    are already ints and made ord() raise TypeError before.
    """
    lo, hi = bytes[0], bytes[1]
    if not isinstance(lo, int):   # str input: convert chars to codes
        lo, hi = ord(lo), ord(hi)
    return hi * 256 + lo
#--------------------------------------------------------------------------------------------
def int2hex_str(number_int16):
    """Render a 16-bit unsigned integer as '0xHHHH' (uppercase, always
    four digits, e.g. 4660 -> '0x1234').

    Fixed for Python 3: the original split nibbles with '/' division and
    used the (float) results to index the hd table, which raised
    TypeError.  '%04X' is equivalent for values 0..65535.
    """
    return '0x%04X' % int(number_int16)
#--------------------------------------------------------------------------------------------
def str2unicode(string):
    """Convert an ASCII string to 'dumb unicode': every character is
    followed by a NUL byte (UTF-16-LE style for ASCII input)."""
    return ''.join(ch + '\000' for ch in string)

407
py/libs/raddic/dictionary Normal file
View file

@ -0,0 +1,407 @@
#
# Version $Id: dictionary,v 1.1.1.1 2002/10/11 12:25:39 wichert Exp $
#
# This file contains dictionary translations for parsing
# requests and generating responses. All transactions are
# composed of Attribute/Value Pairs. The value of each attribute
# is specified as one of 4 data types. Valid data types are:
#
# string - 0-253 octets
# ipaddr - 4 octets in network byte order
# integer - 32 bit value in big endian order (high byte first)
# date - 32 bit value in big endian order - seconds since
# 00:00:00 GMT, Jan. 1, 1970
#
# FreeRADIUS includes extended data types which are not defined
# in RFC 2865 or RFC 2866. These data types are:
#
# abinary - Ascend's binary filter format.
# octets - raw octets, printed and input as hex strings.
# e.g.: 0x123456789abcdef
#
#
# Enumerated values are stored in the user file with dictionary
# VALUE translations for easy administration.
#
# Example:
#
# ATTRIBUTE VALUE
# --------------- -----
# Framed-Protocol = PPP
# 7 = 1 (integer encoding)
#
#
# Include compatibility dictionary for older users file. Move this
# directive to the end of the file if you want to see the old names
# in the logfiles too.
#
#$INCLUDE dictionary.compat # compatibility issues
#$INCLUDE dictionary.acc
#$INCLUDE dictionary.ascend
#$INCLUDE dictionary.bay
#$INCLUDE dictionary.cisco
#$INCLUDE dictionary.livingston
$INCLUDE dictionary.microsoft
#$INCLUDE dictionary.quintum
#$INCLUDE dictionary.redback
#$INCLUDE dictionary.shasta
#$INCLUDE dictionary.shiva
#$INCLUDE dictionary.tunnel
#$INCLUDE dictionary.usr
#$INCLUDE dictionary.versanet
#$INCLUDE dictionary.erx
$INCLUDE dictionary.freeradius
#$INCLUDE dictionary.alcatel
#
# Following are the proper new names. Use these.
#
ATTRIBUTE User-Name 1 string
ATTRIBUTE User-Password 2 string
ATTRIBUTE CHAP-Password 3 octets
ATTRIBUTE NAS-IP-Address 4 ipaddr
ATTRIBUTE NAS-Port 5 integer
ATTRIBUTE Service-Type 6 integer
ATTRIBUTE Framed-Protocol 7 integer
ATTRIBUTE Framed-IP-Address 8 ipaddr
ATTRIBUTE Framed-IP-Netmask 9 ipaddr
ATTRIBUTE Framed-Routing 10 integer
ATTRIBUTE Filter-Id 11 string
ATTRIBUTE Framed-MTU 12 integer
ATTRIBUTE Framed-Compression 13 integer
ATTRIBUTE Login-IP-Host 14 ipaddr
ATTRIBUTE Login-Service 15 integer
ATTRIBUTE Login-TCP-Port 16 integer
ATTRIBUTE Reply-Message 18 string
ATTRIBUTE Callback-Number 19 string
ATTRIBUTE Callback-Id 20 string
ATTRIBUTE Framed-Route 22 string
ATTRIBUTE Framed-IPX-Network 23 ipaddr
ATTRIBUTE State 24 octets
ATTRIBUTE Class 25 octets
ATTRIBUTE Vendor-Specific 26 octets
ATTRIBUTE Session-Timeout 27 integer
ATTRIBUTE Idle-Timeout 28 integer
ATTRIBUTE Termination-Action 29 integer
ATTRIBUTE Called-Station-Id 30 string
ATTRIBUTE Calling-Station-Id 31 string
ATTRIBUTE NAS-Identifier 32 string
ATTRIBUTE Proxy-State 33 octets
ATTRIBUTE Login-LAT-Service 34 string
ATTRIBUTE Login-LAT-Node 35 string
ATTRIBUTE Login-LAT-Group 36 octets
ATTRIBUTE Framed-AppleTalk-Link 37 integer
ATTRIBUTE Framed-AppleTalk-Network 38 integer
ATTRIBUTE Framed-AppleTalk-Zone 39 string
ATTRIBUTE Acct-Status-Type 40 integer
ATTRIBUTE Acct-Delay-Time 41 integer
ATTRIBUTE Acct-Input-Octets 42 integer
ATTRIBUTE Acct-Output-Octets 43 integer
ATTRIBUTE Acct-Session-Id 44 string
ATTRIBUTE Acct-Authentic 45 integer
ATTRIBUTE Acct-Session-Time 46 integer
ATTRIBUTE Acct-Input-Packets 47 integer
ATTRIBUTE Acct-Output-Packets 48 integer
ATTRIBUTE Acct-Terminate-Cause 49 integer
ATTRIBUTE Acct-Multi-Session-Id 50 string
ATTRIBUTE Acct-Link-Count 51 integer
ATTRIBUTE Acct-Input-Gigawords 52 integer
ATTRIBUTE Acct-Output-Gigawords 53 integer
ATTRIBUTE Event-Timestamp 55 date
ATTRIBUTE CHAP-Challenge 60 string
ATTRIBUTE NAS-Port-Type 61 integer
ATTRIBUTE Port-Limit 62 integer
ATTRIBUTE Login-LAT-Port 63 integer
ATTRIBUTE Acct-Tunnel-Connection 68 string
ATTRIBUTE ARAP-Password 70 string
ATTRIBUTE ARAP-Features 71 string
ATTRIBUTE ARAP-Zone-Access 72 integer
ATTRIBUTE ARAP-Security 73 integer
ATTRIBUTE ARAP-Security-Data 74 string
ATTRIBUTE Password-Retry 75 integer
ATTRIBUTE Prompt 76 integer
ATTRIBUTE Connect-Info 77 string
ATTRIBUTE Configuration-Token 78 string
ATTRIBUTE EAP-Message 79 string
ATTRIBUTE Message-Authenticator 80 octets
ATTRIBUTE ARAP-Challenge-Response 84 string # 10 octets
ATTRIBUTE Acct-Interim-Interval 85 integer
ATTRIBUTE NAS-Port-Id 87 string
ATTRIBUTE Framed-Pool 88 string
ATTRIBUTE NAS-IPv6-Address 95 octets # really IPv6
ATTRIBUTE Framed-Interface-Id 96 octets # 8 octets
ATTRIBUTE Framed-IPv6-Prefix 97 ipv6prefix # stupid format
ATTRIBUTE Login-IPv6-Host 98 octets # really IPv6
ATTRIBUTE Framed-IPv6-Route 99 string
ATTRIBUTE Framed-IPv6-Pool 100 string
ATTRIBUTE Delegated-IPv6-Prefix 123 ipv6prefix
ATTRIBUTE Digest-Response 206 string
ATTRIBUTE Digest-Attributes 207 octets # stupid format
#
# Experimental Non Protocol Attributes used by Cistron-Radiusd
#
# These attributes CAN go in the reply item list.
ATTRIBUTE Fall-Through 500 integer
ATTRIBUTE Exec-Program 502 string
ATTRIBUTE Exec-Program-Wait 503 string
# These attributes CANNOT go in the reply item list.
ATTRIBUTE User-Category 1029 string
ATTRIBUTE Group-Name 1030 string
ATTRIBUTE Huntgroup-Name 1031 string
ATTRIBUTE Simultaneous-Use 1034 integer
ATTRIBUTE Strip-User-Name 1035 integer
ATTRIBUTE Hint 1040 string
ATTRIBUTE Pam-Auth 1041 string
ATTRIBUTE Login-Time 1042 string
ATTRIBUTE Stripped-User-Name 1043 string
ATTRIBUTE Current-Time 1044 string
ATTRIBUTE Realm 1045 string
ATTRIBUTE No-Such-Attribute 1046 string
ATTRIBUTE Packet-Type 1047 integer
ATTRIBUTE Proxy-To-Realm 1048 string
ATTRIBUTE Replicate-To-Realm 1049 string
ATTRIBUTE Acct-Session-Start-Time 1050 date
ATTRIBUTE Acct-Unique-Session-Id 1051 string
ATTRIBUTE Client-IP-Address 1052 ipaddr
ATTRIBUTE Ldap-UserDn 1053 string
ATTRIBUTE NS-MTA-MD5-Password 1054 string
ATTRIBUTE SQL-User-Name 1055 string
ATTRIBUTE LM-Password 1057 octets
ATTRIBUTE NT-Password 1058 octets
ATTRIBUTE SMB-Account-CTRL 1059 integer
ATTRIBUTE SMB-Account-CTRL-TEXT 1061 string
ATTRIBUTE User-Profile 1062 string
ATTRIBUTE Digest-Realm 1063 string
ATTRIBUTE Digest-Nonce 1064 string
ATTRIBUTE Digest-Method 1065 string
ATTRIBUTE Digest-URI 1066 string
ATTRIBUTE Digest-QOP 1067 string
ATTRIBUTE Digest-Algorithm 1068 string
ATTRIBUTE Digest-Body-Digest 1069 string
ATTRIBUTE Digest-CNonce 1070 string
ATTRIBUTE Digest-Nonce-Count 1071 string
ATTRIBUTE Digest-User-Name 1072 string
ATTRIBUTE Pool-Name 1073 string
ATTRIBUTE Ldap-Group 1074 string
ATTRIBUTE Module-Success-Message 1075 string
ATTRIBUTE Module-Failure-Message 1076 string
# X99-Fast 1077 integer
#
# Non-Protocol Attributes
# These attributes are used internally by the server
#
ATTRIBUTE Auth-Type 1000 integer
ATTRIBUTE Menu 1001 string
ATTRIBUTE Termination-Menu 1002 string
ATTRIBUTE Prefix 1003 string
ATTRIBUTE Suffix 1004 string
ATTRIBUTE Group 1005 string
ATTRIBUTE Crypt-Password 1006 string
ATTRIBUTE Connect-Rate 1007 integer
ATTRIBUTE Add-Prefix 1008 string
ATTRIBUTE Add-Suffix 1009 string
ATTRIBUTE Expiration 1010 date
ATTRIBUTE Autz-Type 1011 integer
#
# Integer Translations
#
# User Types
VALUE Service-Type Login-User 1
VALUE Service-Type Framed-User 2
VALUE Service-Type Callback-Login-User 3
VALUE Service-Type Callback-Framed-User 4
VALUE Service-Type Outbound-User 5
VALUE Service-Type Administrative-User 6
VALUE Service-Type NAS-Prompt-User 7
VALUE Service-Type Authenticate-Only 8
VALUE Service-Type Callback-NAS-Prompt 9
VALUE Service-Type Call-Check 10
VALUE Service-Type Callback-Administrative 11
# Framed Protocols
VALUE Framed-Protocol PPP 1
VALUE Framed-Protocol SLIP 2
VALUE Framed-Protocol ARAP 3
VALUE Framed-Protocol Gandalf-SLML 4
VALUE Framed-Protocol Xylogics-IPX-SLIP 5
VALUE Framed-Protocol X.75-Synchronous 6
# Framed Routing Values
VALUE Framed-Routing None 0
VALUE Framed-Routing Broadcast 1
VALUE Framed-Routing Listen 2
VALUE Framed-Routing Broadcast-Listen 3
# Framed Compression Types
VALUE Framed-Compression None 0
VALUE Framed-Compression Van-Jacobson-TCP-IP 1
VALUE Framed-Compression IPX-Header-Compression 2
VALUE Framed-Compression Stac-LZS 3
# Login Services
VALUE Login-Service Telnet 0
VALUE Login-Service Rlogin 1
VALUE Login-Service TCP-Clear 2
VALUE Login-Service PortMaster 3
VALUE Login-Service LAT 4
VALUE Login-Service X25-PAD 5
VALUE Login-Service X25-T3POS 6
VALUE Login-Service TCP-Clear-Quiet 8
# Login-TCP-Port (see /etc/services for more examples)
VALUE Login-TCP-Port Telnet 23
VALUE Login-TCP-Port Rlogin 513
VALUE Login-TCP-Port Rsh 514
# Status Types
VALUE Acct-Status-Type Start 1
VALUE Acct-Status-Type Stop 2
VALUE Acct-Status-Type Interim-Update 3
VALUE Acct-Status-Type Alive 3
VALUE Acct-Status-Type Accounting-On 7
VALUE Acct-Status-Type Accounting-Off 8
# RFC 2867 Additional Status-Type Values
VALUE Acct-Status-Type Tunnel-Start 9
VALUE Acct-Status-Type Tunnel-Stop 10
VALUE Acct-Status-Type Tunnel-Reject 11
VALUE Acct-Status-Type Tunnel-Link-Start 12
VALUE Acct-Status-Type Tunnel-Link-Stop 13
VALUE Acct-Status-Type Tunnel-Link-Reject 14
# Authentication Types
VALUE Acct-Authentic RADIUS 1
VALUE Acct-Authentic Local 2
# Termination Options
VALUE Termination-Action Default 0
VALUE Termination-Action RADIUS-Request 1
# NAS Port Types
VALUE NAS-Port-Type Async 0
VALUE NAS-Port-Type Sync 1
VALUE NAS-Port-Type ISDN 2
VALUE NAS-Port-Type ISDN-V120 3
VALUE NAS-Port-Type ISDN-V110 4
VALUE NAS-Port-Type Virtual 5
VALUE NAS-Port-Type PIAFS 6
VALUE NAS-Port-Type HDLC-Clear-Channel 7
VALUE NAS-Port-Type X.25 8
VALUE NAS-Port-Type X.75 9
VALUE NAS-Port-Type G.3-Fax 10
VALUE NAS-Port-Type SDSL 11
VALUE NAS-Port-Type ADSL-CAP 12
VALUE NAS-Port-Type ADSL-DMT 13
VALUE NAS-Port-Type IDSL 14
VALUE NAS-Port-Type Ethernet 15
VALUE NAS-Port-Type xDSL 16
VALUE NAS-Port-Type Cable 17
VALUE NAS-Port-Type Wireless-Other 18
VALUE NAS-Port-Type Wireless-802.11 19
# Acct Terminate Causes, available in 3.3.2 and later
VALUE Acct-Terminate-Cause User-Request 1
VALUE Acct-Terminate-Cause Lost-Carrier 2
VALUE Acct-Terminate-Cause Lost-Service 3
VALUE Acct-Terminate-Cause Idle-Timeout 4
VALUE Acct-Terminate-Cause Session-Timeout 5
VALUE Acct-Terminate-Cause Admin-Reset 6
VALUE Acct-Terminate-Cause Admin-Reboot 7
VALUE Acct-Terminate-Cause Port-Error 8
VALUE Acct-Terminate-Cause NAS-Error 9
VALUE Acct-Terminate-Cause NAS-Request 10
VALUE Acct-Terminate-Cause NAS-Reboot 11
VALUE Acct-Terminate-Cause Port-Unneeded 12
VALUE Acct-Terminate-Cause Port-Preempted 13
VALUE Acct-Terminate-Cause Port-Suspended 14
VALUE Acct-Terminate-Cause Service-Unavailable 15
VALUE Acct-Terminate-Cause Callback 16
VALUE Acct-Terminate-Cause User-Error 17
VALUE Acct-Terminate-Cause Host-Request 18
#VALUE Tunnel-Type L2TP 3
#VALUE Tunnel-Medium-Type IP 1
VALUE Prompt No-Echo 0
VALUE Prompt Echo 1
#
# Non-Protocol Integer Translations
#
VALUE Auth-Type Local 0
VALUE Auth-Type System 1
VALUE Auth-Type SecurID 2
VALUE Auth-Type Crypt-Local 3
VALUE Auth-Type Reject 4
VALUE Auth-Type ActivCard 5
VALUE Auth-Type EAP 6
VALUE Auth-Type ARAP 7
#
# Cistron extensions
#
VALUE Auth-Type Ldap 252
VALUE Auth-Type Pam 253
VALUE Auth-Type Accept 254
VALUE Auth-Type PAP 1024
VALUE Auth-Type CHAP 1025
VALUE Auth-Type LDAP 1026
VALUE Auth-Type PAM 1027
VALUE Auth-Type MS-CHAP 1028
VALUE Auth-Type Kerberos 1029
VALUE Auth-Type CRAM 1030
VALUE Auth-Type NS-MTA-MD5 1031
VALUE Auth-Type CRAM 1032
VALUE Auth-Type SMB 1033
#
# Authorization type, too.
#
VALUE Autz-Type Local 0
#
# Experimental Non-Protocol Integer Translations for Cistron-Radiusd
#
VALUE Fall-Through No 0
VALUE Fall-Through Yes 1
VALUE Packet-Type Access-Request 1
VALUE Packet-Type Access-Accept 2
VALUE Packet-Type Access-Reject 3
VALUE Packet-Type Accounting-Request 4
VALUE Packet-Type Accounting-Response 5
VALUE Packet-Type Accounting-Status 6
VALUE Packet-Type Password-Request 7
VALUE Packet-Type Password-Accept 8
VALUE Packet-Type Password-Reject 9
VALUE Packet-Type Accounting-Message 10
VALUE Packet-Type Access-Challenge 11
VALUE Packet-Type Status-Server 12
VALUE Packet-Type Status-Client 13

View file

@ -0,0 +1,139 @@
# -*- text -*-
# Copyright (C) 2015 The FreeRADIUS Server project and contributors
#
# The FreeRADIUS Vendor-Specific dictionary.
#
# Version: $Id: ea468da88509aeff96b6f0d38ebc97411b9775b3 $
#
# For a complete list of Private Enterprise Codes, see:
#
# http://www.isi.edu/in-notes/iana/assignments/enterprise-numbers
#
VENDOR FreeRADIUS 11344
BEGIN-VENDOR FreeRADIUS
#
# This attribute is really a bitmask.
#
ATTRIBUTE FreeRADIUS-Statistics-Type 127 integer
VALUE FreeRADIUS-Statistics-Type None 0
VALUE FreeRADIUS-Statistics-Type Authentication 1
VALUE FreeRADIUS-Statistics-Type Accounting 2
VALUE FreeRADIUS-Statistics-Type Proxy-Authentication 4
VALUE FreeRADIUS-Statistics-Type Proxy-Accounting 8
VALUE FreeRADIUS-Statistics-Type Internal 0x10
VALUE FreeRADIUS-Statistics-Type Client 0x20
VALUE FreeRADIUS-Statistics-Type Server 0x40
VALUE FreeRADIUS-Statistics-Type Home-Server 0x80
VALUE FreeRADIUS-Statistics-Type Auth-Acct 0x03
VALUE FreeRADIUS-Statistics-Type Proxy-Auth-Acct 0x0c
VALUE FreeRADIUS-Statistics-Type All 0x1f
#
# FreeRADIUS statistic result attributes
#
ATTRIBUTE FreeRADIUS-Total-Access-Requests 128 integer
ATTRIBUTE FreeRADIUS-Total-Access-Accepts 129 integer
ATTRIBUTE FreeRADIUS-Total-Access-Rejects 130 integer
ATTRIBUTE FreeRADIUS-Total-Access-Challenges 131 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Responses 132 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Duplicate-Requests 133 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Malformed-Requests 134 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Invalid-Requests 135 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Dropped-Requests 136 integer
ATTRIBUTE FreeRADIUS-Total-Auth-Unknown-Types 137 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Access-Requests 138 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Access-Accepts 139 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Access-Rejects 140 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Access-Challenges 141 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Responses 142 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Duplicate-Requests 143 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Malformed-Requests 144 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Invalid-Requests 145 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Dropped-Requests 146 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Auth-Unknown-Types 147 integer
ATTRIBUTE FreeRADIUS-Total-Accounting-Requests 148 integer
ATTRIBUTE FreeRADIUS-Total-Accounting-Responses 149 integer
ATTRIBUTE FreeRADIUS-Total-Acct-Duplicate-Requests 150 integer
ATTRIBUTE FreeRADIUS-Total-Acct-Malformed-Requests 151 integer
ATTRIBUTE FreeRADIUS-Total-Acct-Invalid-Requests 152 integer
ATTRIBUTE FreeRADIUS-Total-Acct-Dropped-Requests 153 integer
ATTRIBUTE FreeRADIUS-Total-Acct-Unknown-Types 154 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Accounting-Requests 155 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Accounting-Responses 156 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Acct-Duplicate-Requests 157 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Acct-Malformed-Requests 158 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Acct-Invalid-Requests 159 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Acct-Dropped-Requests 160 integer
ATTRIBUTE FreeRADIUS-Total-Proxy-Acct-Unknown-Types 161 integer
ATTRIBUTE FreeRADIUS-Queue-Len-Internal 162 integer
ATTRIBUTE FreeRADIUS-Queue-Len-Proxy 163 integer
ATTRIBUTE FreeRADIUS-Queue-Len-Auth 164 integer
ATTRIBUTE FreeRADIUS-Queue-Len-Acct 165 integer
ATTRIBUTE FreeRADIUS-Queue-Len-Detail 166 integer
ATTRIBUTE FreeRADIUS-Stats-Start-Time 176 date
ATTRIBUTE FreeRADIUS-Stats-HUP-Time 177 date
ATTRIBUTE FreeRADIUS-Queue-PPS-In 181 integer
ATTRIBUTE FreeRADIUS-Queue-PPS-Out 182 integer
END-VENDOR FreeRADIUS
# MikroTik Attributes
VENDOR Mikrotik 14988
BEGIN-VENDOR Mikrotik
ATTRIBUTE Mikrotik-Recv-Limit 1 integer
ATTRIBUTE Mikrotik-Xmit-Limit 2 integer
ATTRIBUTE Mikrotik-Group 3 string
ATTRIBUTE Mikrotik-Wireless-Forward 4 integer
ATTRIBUTE Mikrotik-Wireless-Skip-Dot1x 5 integer
ATTRIBUTE Mikrotik-Wireless-Enc-Algo 6 integer
ATTRIBUTE Mikrotik-Wireless-Enc-Key 7 string
ATTRIBUTE Mikrotik-Rate-Limit 8 string
ATTRIBUTE Mikrotik-Realm 9 string
ATTRIBUTE Mikrotik-Host-IP 10 ipaddr
ATTRIBUTE Mikrotik-Mark-Id 11 string
ATTRIBUTE Mikrotik-Advertise-URL 12 string
ATTRIBUTE Mikrotik-Advertise-Interval 13 integer
ATTRIBUTE Mikrotik-Recv-Limit-Gigawords 14 integer
ATTRIBUTE Mikrotik-Xmit-Limit-Gigawords 15 integer
ATTRIBUTE Mikrotik-Wireless-PSK 16 string
ATTRIBUTE Mikrotik-Total-Limit 17 integer
ATTRIBUTE Mikrotik-Total-Limit-Gigawords 18 integer
ATTRIBUTE Mikrotik-Address-List 19 string
ATTRIBUTE Mikrotik-Wireless-MPKey 20 string
ATTRIBUTE Mikrotik-Wireless-Comment 21 string
ATTRIBUTE Mikrotik-Delegated-IPv6-Pool 22 string
ATTRIBUTE Mikrotik-DHCP-Option-Set 23 string
ATTRIBUTE Mikrotik-DHCP-Option-Param_STR1 24 string
ATTRIBUTE Mikrotik-DHCP-Option-Param_STR2 25 string
ATTRIBUTE Mikrotik-Wireless-VLANID 26 integer
ATTRIBUTE Mikrotik-Wireless-VLANIDtype 27 integer
ATTRIBUTE Mikrotik-Wireless-Minsignal 28 string
ATTRIBUTE Mikrotik-Wireless-Maxsignal 29 string
ATTRIBUTE Mikrotik-Switching-Filter 30 string
# MikroTik Values
VALUE Mikrotik-Wireless-Enc-Algo No-encryption 0
VALUE Mikrotik-Wireless-Enc-Algo 40-bit-WEP 1
VALUE Mikrotik-Wireless-Enc-Algo 104-bit-WEP 2
VALUE Mikrotik-Wireless-Enc-Algo AES-CCM 3
VALUE Mikrotik-Wireless-Enc-Algo TKIP 4
VALUE Mikrotik-Wireless-VLANIDtype 802.1q 0
VALUE Mikrotik-Wireless-VLANIDtype 802.1ad 1
END-VENDOR Mikrotik

View file

@ -0,0 +1,169 @@
# -*- text -*-
# Copyright (C) 2011 The FreeRADIUS Server project and contributors
#
# Microsoft's VSA's, from RFC 2548
#
# $Id: cd23825d14af1099f38db2b75233246ab4f53df9 $
#
VENDOR Microsoft 311
BEGIN-VENDOR Microsoft
ATTRIBUTE MS-CHAP-Response 1 octets[50]
ATTRIBUTE MS-CHAP-Error 2 string
ATTRIBUTE MS-CHAP-CPW-1 3 octets[70]
ATTRIBUTE MS-CHAP-CPW-2 4 octets[84]
ATTRIBUTE MS-CHAP-LM-Enc-PW 5 octets
ATTRIBUTE MS-CHAP-NT-Enc-PW 6 octets
ATTRIBUTE MS-MPPE-Encryption-Policy 7 integer
VALUE MS-MPPE-Encryption-Policy Encryption-Allowed 1
VALUE MS-MPPE-Encryption-Policy Encryption-Required 2
# This is referred to as both singular and plural in the RFC.
# Plural seems to make more sense.
ATTRIBUTE MS-MPPE-Encryption-Type 8 integer
ATTRIBUTE MS-MPPE-Encryption-Types 8 integer
VALUE MS-MPPE-Encryption-Types RC4-40bit-Allowed 1
VALUE MS-MPPE-Encryption-Types RC4-128bit-Allowed 2
VALUE MS-MPPE-Encryption-Types RC4-40or128-bit-Allowed 6
ATTRIBUTE MS-RAS-Vendor 9 integer # content is Vendor-ID
ATTRIBUTE MS-CHAP-Domain 10 string
ATTRIBUTE MS-CHAP-Challenge 11 octets
ATTRIBUTE MS-CHAP-MPPE-Keys 12 octets encrypt=1
ATTRIBUTE MS-BAP-Usage 13 integer
ATTRIBUTE MS-Link-Utilization-Threshold 14 integer # values are 1-100
ATTRIBUTE MS-Link-Drop-Time-Limit 15 integer
ATTRIBUTE MS-MPPE-Send-Key 16 octets encrypt=2
ATTRIBUTE MS-MPPE-Recv-Key 17 octets encrypt=2
ATTRIBUTE MS-RAS-Version 18 string
ATTRIBUTE MS-Old-ARAP-Password 19 octets
ATTRIBUTE MS-New-ARAP-Password 20 octets
ATTRIBUTE MS-ARAP-PW-Change-Reason 21 integer
ATTRIBUTE MS-Filter 22 octets
ATTRIBUTE MS-Acct-Auth-Type 23 integer
ATTRIBUTE MS-Acct-EAP-Type 24 integer
ATTRIBUTE MS-CHAP2-Response 25 octets[50]
ATTRIBUTE MS-CHAP2-Success 26 octets
ATTRIBUTE MS-CHAP2-CPW 27 octets[68]
ATTRIBUTE MS-Primary-DNS-Server 28 ipaddr
ATTRIBUTE MS-Secondary-DNS-Server 29 ipaddr
ATTRIBUTE MS-Primary-NBNS-Server 30 ipaddr
ATTRIBUTE MS-Secondary-NBNS-Server 31 ipaddr
#ATTRIBUTE MS-ARAP-Challenge 33 octets[8]
## MS-RNAP
#
# http://download.microsoft.com/download/9/5/E/95EF66AF-9026-4BB0-A41D-A4F81802D92C/%5BMS-RNAP%5D.pdf
ATTRIBUTE MS-RAS-Client-Name 34 string
ATTRIBUTE MS-RAS-Client-Version 35 string
ATTRIBUTE MS-Quarantine-IPFilter 36 octets
ATTRIBUTE MS-Quarantine-Session-Timeout 37 integer
ATTRIBUTE MS-User-Security-Identity 40 string
ATTRIBUTE MS-Identity-Type 41 integer
ATTRIBUTE MS-Service-Class 42 string
ATTRIBUTE MS-Quarantine-User-Class 44 string
ATTRIBUTE MS-Quarantine-State 45 integer
ATTRIBUTE MS-Quarantine-Grace-Time 46 integer
ATTRIBUTE MS-Network-Access-Server-Type 47 integer
ATTRIBUTE MS-AFW-Zone 48 integer
VALUE MS-AFW-Zone MS-AFW-Zone-Boundary-Policy 1
VALUE MS-AFW-Zone MS-AFW-Zone-Unprotected-Policy 2
VALUE MS-AFW-Zone MS-AFW-Zone-Protected-Policy 3
ATTRIBUTE MS-AFW-Protection-Level 49 integer
VALUE MS-AFW-Protection-Level HECP-Response-Sign-Only 1
VALUE MS-AFW-Protection-Level HECP-Response-Sign-And-Encrypt 2
ATTRIBUTE MS-Machine-Name 50 string
ATTRIBUTE MS-IPv6-Filter 51 octets
ATTRIBUTE MS-IPv4-Remediation-Servers 52 octets
ATTRIBUTE MS-IPv6-Remediation-Servers 53 octets
ATTRIBUTE MS-RNAP-Not-Quarantine-Capable 54 integer
VALUE MS-RNAP-Not-Quarantine-Capable SoH-Sent 0
VALUE MS-RNAP-Not-Quarantine-Capable SoH-Not-Sent 1
ATTRIBUTE MS-Quarantine-SOH 55 octets
ATTRIBUTE MS-RAS-Correlation 56 octets
# Or this might be 56?
ATTRIBUTE MS-Extended-Quarantine-State 57 integer
ATTRIBUTE MS-HCAP-User-Groups 58 string
ATTRIBUTE MS-HCAP-Location-Group-Name 59 string
ATTRIBUTE MS-HCAP-User-Name 60 string
ATTRIBUTE MS-User-IPv4-Address 61 ipaddr
ATTRIBUTE MS-User-IPv6-Address 62 ipv6addr
ATTRIBUTE MS-TSG-Device-Redirection 63 integer
#
# Integer Translations
#
# MS-BAP-Usage Values
VALUE MS-BAP-Usage Not-Allowed 0
VALUE MS-BAP-Usage Allowed 1
VALUE MS-BAP-Usage Required 2
# MS-ARAP-Password-Change-Reason Values
VALUE MS-ARAP-PW-Change-Reason Just-Change-Password 1
VALUE MS-ARAP-PW-Change-Reason Expired-Password 2
VALUE MS-ARAP-PW-Change-Reason Admin-Requires-Password-Change 3
VALUE MS-ARAP-PW-Change-Reason Password-Too-Short 4
# MS-Acct-Auth-Type Values
VALUE MS-Acct-Auth-Type PAP 1
VALUE MS-Acct-Auth-Type CHAP 2
VALUE MS-Acct-Auth-Type MS-CHAP-1 3
VALUE MS-Acct-Auth-Type MS-CHAP-2 4
VALUE MS-Acct-Auth-Type EAP 5
# MS-Acct-EAP-Type Values
VALUE MS-Acct-EAP-Type MD5 4
VALUE MS-Acct-EAP-Type OTP 5
VALUE MS-Acct-EAP-Type Generic-Token-Card 6
VALUE MS-Acct-EAP-Type TLS 13
# MS-Identity-Type Values
VALUE MS-Identity-Type Machine-Health-Check 1
VALUE MS-Identity-Type Ignore-User-Lookup-Failure 2
# MS-Quarantine-State Values
VALUE MS-Quarantine-State Full-Access 0
VALUE MS-Quarantine-State Quarantine 1
VALUE MS-Quarantine-State Probation 2
# MS-Network-Access-Server-Type Values
VALUE MS-Network-Access-Server-Type Unspecified 0
VALUE MS-Network-Access-Server-Type Terminal-Server-Gateway 1
VALUE MS-Network-Access-Server-Type Remote-Access-Server 2
VALUE MS-Network-Access-Server-Type DHCP-Server 3
VALUE MS-Network-Access-Server-Type Wireless-Access-Point 4
VALUE MS-Network-Access-Server-Type HRA 5
VALUE MS-Network-Access-Server-Type HCAP-Server 6
# MS-Extended-Quarantine-State Values
VALUE MS-Extended-Quarantine-State Transition 1
VALUE MS-Extended-Quarantine-State Infected 2
VALUE MS-Extended-Quarantine-State Unknown 3
VALUE MS-Extended-Quarantine-State No-Data 4
END-VENDOR Microsoft

139
py/libs/red.py Normal file
View file

@ -0,0 +1,139 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# red.py: read/write data in Redis
# - get/set key values with expiration time
# - simple list operations
# - atomic increment, getset
#
#
# https://redis.io/commands
# https://github.com/andymccurdy/redis-py
#
# MikroWizard.com , Mikrotik router management solution
# Author: Tomi.Mickelsson@iki.fi modified by sepehr.ha@gmail.com
import redis
import datetime
import time
from collections import defaultdict
import logging
log = logging.getLogger("RedisDB")
# --------------------------------------------------------------------------
# key values
class RedisDB(object):
    """Per-device sensor storage on top of Redis TimeSeries.

    One master series per (device, sensor) plus three downsampled rollups:
    5-minute averages (kept 24h), hourly averages (kept 2 weeks) and daily
    averages (kept ~3 months).  All data methods are no-ops when the
    instance was created without a dev_id.
    """

    def __init__(self, options):
        # dev_id stays False when the caller did not bind this instance to
        # a device; every data method then returns early.
        self.dev_id = options.get('dev_id', False)
        self.keys = options.get('keys', [])
        self.current_time = datetime.datetime.now()
        # Default query window: the last 30 days.
        self.start_time = options.get('start_time', self.current_time + datetime.timedelta(days=-30))
        self.end_time = options.get('end_time', self.current_time)
        self.retention = options.get('retention', 2629800000)  # ms, ~1 month
        self.r = redis.Redis(host='localhost', port=6379, db=0)
        # delta selects which series get_dev_data reads: 'live', '5m', '1h', 'daily'.
        self.delta = options.get('delta', '')

    def create_sensor_rts(self, sensor):
        """Create the master timeseries and its rollup series for *sensor*,
        or refresh their retention if they already exist, and install the
        avg compaction rules.  Returns True, or None when unbound."""
        retention = self.retention
        # Traffic counters (rx/tx) are high-volume: keep raw samples 1h only.
        if "rx" in sensor or "tx" in sensor:
            retention = 3600000
        if self.dev_id == False:
            return
        master_key = "sensor::{}::{}".format(self.dev_id, sensor)
        rule5m_key = "sensor5m::{}::{}".format(self.dev_id, sensor)
        rule1h_key = "sensor1h::{}::{}".format(self.dev_id, sensor)
        ruledaily_key = "sensordaily::{}::{}".format(self.dev_id, sensor)
        # Create the master key, or adjust retention if it already exists.
        try:
            self.r.ts().create(master_key, retention_msecs=retention, duplicate_policy="last")
        except Exception:
            self.r.ts().alter(master_key, retention_msecs=retention)
        # Create the rollup keys, or adjust retention if they already exist.
        # 5m avg: 24h; 1h avg: 2 weeks; daily avg: 2160h = 90 days.
        try:
            self.r.ts().create(rule5m_key, retention_msecs=3600000 * 24, duplicate_policy="last")
            self.r.ts().create(rule1h_key, retention_msecs=3600000 * 336, duplicate_policy="last")
            # BUGFIX: was retention_msecs=retention*2160 (~180 years with the
            # default retention); 3600000*2160 matches both the alter()
            # fallback below and the intended 3-month window.
            self.r.ts().create(ruledaily_key, retention_msecs=3600000 * 2160, duplicate_policy="last")
        except Exception:
            self.r.ts().alter(rule5m_key, retention_msecs=3600000 * 24)
            self.r.ts().alter(rule1h_key, retention_msecs=3600000 * 336)
            self.r.ts().alter(ruledaily_key, retention_msecs=3600000 * 2160)
        # Compaction rules; createrule raises when the rule already exists,
        # which is fine — ignore it.
        try:
            self.r.ts().createrule(master_key, rule5m_key, "avg", bucket_size_msec=300000)
        except Exception:
            pass
        try:
            self.r.ts().createrule(master_key, rule1h_key, "avg", bucket_size_msec=3600000)
        except Exception:
            pass
        try:
            self.r.ts().createrule(master_key, ruledaily_key, "avg", bucket_size_msec=86400000)
        except Exception:
            pass
        return True

    def dev_create_keys(self):
        """Create/refresh the timeseries for every sensor in self.keys."""
        if self.dev_id == False:
            return
        for key in self.keys:
            try:
                self.create_sensor_rts(key)
            except Exception as e:
                log.error(e)
        return True

    def add_dev_data(self, info=None):
        """Append one sample per sensor; *info* maps sensor name -> value.

        BUGFIX: the default used to be a mutable list [], which is both the
        shared-mutable-default pitfall and the wrong type (lists have no
        .items()); it is now None, normalized to an empty dict.
        """
        if self.dev_id == False:
            return
        info = info or {}
        datalist = []
        for key, val in info.items():
            master_key = "sensor::{}::{}".format(self.dev_id, key)
            # '*' = let the Redis server timestamp the sample.
            datalist.append((master_key, '*', val))
        self.r.ts().madd(datalist)
        return True

    def get_dev_data(self, sensor):
        """Return [(timestamp_ms, value), ...] for *sensor* between
        start_time and end_time.  delta == 'live' reads the newest 30 raw
        samples (oldest first); any other delta reads the matching rollup
        series (e.g. '5m' -> sensor5m::...)."""
        if self.dev_id == False:
            return
        start = int(time.mktime(self.start_time.timetuple()) * 1000)
        end = int(time.mktime(self.end_time.timetuple()) * 1000)
        if self.delta == 'live':
            master_key = "sensor::{}::{}".format(self.dev_id, sensor)
            return list(reversed(self.r.ts().revrange(master_key, start, end, count=30)))
        master_key = "sensor{}::{}::{}".format(self.delta, self.dev_id, sensor)
        return self.r.ts().range(master_key, start, end)

    def get_dev_last_data(self, sensor):
        """Return the most recent (timestamp_ms, value) sample of *sensor*."""
        if self.dev_id == False:
            return
        master_key = "sensor::{}::{}".format(self.dev_id, sensor)
        return self.r.ts().get(master_key)

    def get_dev_data_keys(self):
        """Return {sensor: samples} for every sensor in self.keys; sensors
        whose read fails are logged and left as an empty list."""
        if self.dev_id == False:
            return
        data = defaultdict(list)
        for key in self.keys:
            try:
                data[key] = self.get_dev_data(key)
            except Exception as e:
                log.error(e)
        return data

77
py/libs/ssh_helper.py Normal file
View file

@ -0,0 +1,77 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# ssh_helper.py: ssh related operations
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
import datetime
from libs.check_routeros.routeros_check.helper import logger, RouterOSVersion
import paramiko
import re
import logging
log = logging.getLogger("SSH_HELPER")
#rdb = redis.StrictRedis(host=config.redishost)
#rdb = redis.from_url('redis://{}'.format(config.redishost))
#r = redis.Redis()
# --------------------------------------------------------------------------
# key values
class SSH_Helper(object):
    """Run commands on a MikroTik router over SSH (paramiko).

    Each call opens a fresh SSH connection and — BUGFIX — now closes it
    when done; previously both methods leaked the connection.
    """

    def __init__(self, options):
        self.dev_id = options.get('dev_id', False)
        self.host = options.get('host', False)
        self.username = options.get('username', False)
        self.password = options.get('password', False)
        self.api_port = options.get('port', False)
        self.ssh_port = options.get('ssh_port', 22)
        # RouterOS API client, used only to query the installed version.
        self.router = options.get('router', False)
        self.current_time = datetime.datetime.now()
        self.ssh = paramiko.SSHClient()
        self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    def _connect(self):
        """Open the SSH connection with password-only auth."""
        self.ssh.connect(
            self.host,
            port=self.ssh_port,
            username=self.username,
            password=self.password,
            look_for_keys=False,
            allow_agent=False
        )

    def get_config(self, retrieve='all', full=False, sanitized=False):
        """Export the router configuration via '/export terse'.

        retrieve: only 'running' (or 'all') yields output — RouterOS has no
        candidate/startup configs.  full adds 'verbose'; sanitized controls
        the sensitive-data flags.  Returns False when no API router object
        is available.
        """
        if not self.router:
            return False
        configs = {'running': '', 'candidate': '', 'startup': ''}
        command = ["export", "terse"]
        version = tuple(self.router.api('/system/package/update/print'))[0]
        version = RouterOSVersion(version['installed-version'])
        if full:
            command.append("verbose")
        # v7 hides sensitive values unless asked; v6 shows them unless asked.
        if version.major >= 7 and not sanitized:
            command.append("show-sensitive")
        if version.major <= 6 and sanitized:
            command.append("hide-sensitive")
        self._connect()
        try:
            _x, stdouts, _y = self.ssh.exec_command(" ".join(command))
            config = stdouts.read().decode().strip()
        finally:
            # BUGFIX: close the connection instead of leaking it.
            self.ssh.close()
        # Remove the date/time from the export's first comment line so that
        # otherwise-identical configs compare equal.
        config = re.sub(r"^# \S+ \S+ by (.+)$", r'# by \1', config, flags=re.MULTILINE)
        if retrieve in ("running", "all"):
            configs['running'] = config
        return configs['running']

    def exec_command(self, command):
        """Run a single command on the router and return its stripped stdout."""
        self._connect()
        try:
            _x, stdouts, _y = self.ssh.exec_command(command)
            return stdouts.read().decode().strip()
        finally:
            # BUGFIX: close the connection instead of leaking it.
            self.ssh.close()

1115
py/libs/util.py Normal file

File diff suppressed because it is too large Load diff

296
py/libs/webutil.py Normal file
View file

@ -0,0 +1,296 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# webutil.py: low level page request related methods, decorators, Flask app
# MikroWizard.com , Mikrotik router management solution
# Author: Tomi.Mickelsson@iki.fi , Modified by sepehr.ha@gmail.com
import time
import peewee
import functools
from flask import Flask, request, session, g, jsonify
from flask_session import Session
from flask.json.provider import DefaultJSONProvider
from libs.db import db
import config
import datetime
import logging
log = logging.getLogger("webutil")
# create and configure the Flask app
# Flask app: no static file route; templates are served from ../templates.
app = Flask(__name__, static_folder=None, template_folder="../templates")
app.config.update(config.flask_config)  # app settings come from config.py
Session(app)  # server-side sessions via flask_session
# --------------------------------------------------------------------------
# API decorator
def login_required(func=None, role=None, perm=None):
    """Decorator: must be logged on, and optionally must have the given role.
    Insert after app.route like this:
        @app.route('/api/users')
        @login_required(role='superuser')

    FIX: perm defaulted to a mutable {} (shared across all decorated views);
    it now defaults to None and is normalized per call, which is
    behavior-compatible since _check_user_role still receives a dict.
    """
    # Supports use both with and without arguments: when called with only
    # keyword args, func is None and we return a partial awaiting the view.
    # see https://blogs.it.ox.ac.uk/inapickle/2012/01/05/python-decorators-with-optional-arguments/
    if not func:
        return functools.partial(login_required, role=role, perm=perm)

    @functools.wraps(func)
    def inner(*args, **kwargs):
        # _check_user_role returns an error response (truthy) on failure,
        # None on success — so the view only runs when the check passes.
        return _check_user_role(role, perm=perm if perm is not None else {}) or func(*args, **kwargs)
    return inner
# --------------------------------------------------------------------------
# get data about me, return error replys
def get_myself():
    """Return the user object of the caller, or None for a visitor.

    The user is loaded from the database at most once per request and
    cached on flask.g afterwards.
    """
    if "userid" not in session:
        return None
    if not hasattr(g, "MYSELF"):
        g.MYSELF = db.get_user(session["userid"])
    return g.MYSELF  # cached for the rest of the request
def error_reply(errmsg, httpcode=200):
    """Log an error and return a failure reply to the caller."""
    log.error(errmsg)
    payload = {"status": "failed", "err": "{}: {}".format(httpcode, errmsg)}
    return buildResponse(payload, httpcode)
def warn_reply(errmsg, httpcode=200):
    """Log a warning and return a failure reply to the caller.

    Mirrors error_reply(). The payload key is "err" for consistency with
    error_reply()/buildResponse()/page_not_found() — this helper was the
    only one using the key "error".
    """
    log.warning(errmsg)
    payload = {"status": "failed", "err": "{}: {}".format(httpcode, errmsg)}
    return buildResponse(payload, httpcode)
def get_agent():
    """Return the caller's browser User-Agent string ('' when absent)."""
    return request.headers.get("User-Agent", "")
def get_ip():
    """Return the caller's IP, preferring the X-Real-IP proxy header."""
    return request.headers.get("X-Real-IP") or request.remote_addr
def buildResponse(data, httpcode=200, error=False):
    """Wrap *data* in the standard JSON envelope and return it with httpcode.

    The envelope is {"result": data} plus an "err" key when *error* is
    truthy.
    """
    envelope = {"err": error} if error else {}
    envelope["result"] = data
    return jsonify(envelope), httpcode
# --------------------------------------------------------------------------
# before/after/error request handlers
@app.before_request
def before_request():
    """Executed always before a request. Connects to db, logs the request,
    prepares global data, loads current user.

    Returning a response here short-circuits the actual view (used to
    reject disabled accounts).
    """
    # log request path+input, but not secrets
    try:
        params = request.json or request.args or request.form
    except:
        params = None
    if params:
        cloned = None
        secret_keys = ["password", "passwd", "pwd"]
        for k in secret_keys:
            if k in params:
                if not cloned:
                    cloned = params.copy()
                cloned[k] = 'X'
        if cloned:
            params = cloned
    params = str(params or '')[:1000]
    method = request.method[:2]
    # log.info("{} {} {}".format(method, request.path, params))

    # connect to db
    g.db = db.database
    g.db.connection()

    # have common data available in global g
    # but do not pollute g, store only the most relevant data
    g.HOST = request.headers.get('X-Real-Host', '')
    g.ISLOGGED = "userid" in session
    myrole = session.get("role") or ""
    g.IS_SUPER_USER = myrole == "superuser"
    if myrole == "disabled":
        err = "account disabled"
        log.warning(err)  # log.warn is deprecated in the logging module
        return jsonify({"err": err}), 400

    # time the request
    g.t1 = time.time()

    # where did we link from? (but filter our internal links)
    # if request.referrer:
    #     log.info("linked from "+request.referrer)
@app.after_request
def after_request(response):
    """Executed after a request: logs failed responses, adds CORS headers."""
    # log about error/warning status codes
    logmethod = None
    if 400 <= response.status_code <= 599:
        logmethod = log.error
    elif not 200 <= response.status_code < 399:
        logmethod = log.warning  # log.warn is deprecated in the logging module
    if logmethod:
        logmethod(" {} {} {}".format(response.status_code,
                                     request.method, request.url))

    # set CORS headers
    response.headers['Access-Control-Allow-Origin'] = config.CORS_ALLOW_ORIGIN
    response.headers['Access-Control-Allow-Methods'] = 'GET,POST,PUT,DELETE,OPTIONS'
    response.headers['Access-Control-Allow-Headers'] = 'Content-Type'
    response.headers['Access-Control-Allow-Credentials'] = 'true'
    # response.headers['Access-Control-Expose-Headers'] = 'Access-Control-Allow-Origin'
    return response
@app.teardown_request
def teardown(error):
    """Always executed after a request: closes the db, times the request."""
    if hasattr(g, "db"):
        g.db.close()

    # log warning when a request takes >1.0sec
    # (put long-running tasks into background)
    if hasattr(g, "t1"):
        delta = time.time() - g.t1
        if delta > 1.0:
            # log.warn is deprecated in the logging module
            log.warning("SLOW! {} time={}".format(request.path, delta))
@app.errorhandler(404)
def page_not_found(error):
    """JSON reply for unknown paths."""
    return jsonify({"err": "404: " + request.path}), 404
# --------------------------------------------------------------------------
# logging (is in this module because binds to session)
class ColorFormatter(logging.Formatter):
    """Formatter that wraps warning/error messages in ANSI color codes."""

    # ANSI escapes: yellow for warnings, red for errors/criticals
    _YELLOW = "\033[93m{}\033[0m"
    _RED = "\033[91m{}\033[0m"

    def format(self, rec):
        if rec.levelno == logging.WARNING:
            rec.msg = self._YELLOW.format(rec.msg)
        elif rec.levelno in (logging.ERROR, logging.CRITICAL):
            rec.msg = self._RED.format(rec.msg)
        return logging.Formatter.format(self, rec)
class MyLogContextFilter(logging.Filter):
    """Injects contextual info, ip+userid, into every log record."""

    def filter(self, record):
        if request:
            # inside a request: real ip (possibly from proxy header) + session uid
            ip = get_ip()
            uid = session.get("userid", "anon")
        else:
            # outside a request context: a background worker
            ip = ""
            uid = " -WORKER"
        record.ip = "local" if config.IS_LOCAL_DEV else ip
        record.uid = uid
        return True
def init_logging():
    """Initialize the logging system: format, colors, request context."""
    prefix = "PROD " if config.IS_PRODUCTION else ""
    fmt = prefix + "%(levelname)3.3s %(uid)s@%(ip)s %(asctime)s %(filename)s %(message)s"
    datefmt = "%d%m%y-%H:%M:%S"
    logging.basicConfig(level=logging.INFO, format=fmt, datefmt=datefmt)

    formatter = ColorFormatter(fmt, datefmt=datefmt)
    # custom log data: userid + ip addr
    ctx_filter = MyLogContextFilter()
    for handler in logging.root.handlers:
        handler.addFilter(ctx_filter)
        handler.setFormatter(formatter)  # remove if coloring not wanted

    if config.PYSRV_LOG_SQL:
        logging.getLogger('peewee').setLevel(logging.DEBUG)
# --------------------------------------------------------------------------
# internal methods, serializing models
def _check_user_role(rolebase, perm={}):
    """Return an error reply unless the session role satisfies *rolebase*
    and the session permissions satisfy *perm*; None when access is OK."""
    myrole = session.get("role") or ""
    if _is_role_atleast(myrole, rolebase, perm):
        return None
    uid = session.get("userid") or ""
    err = "Unauthorized! {} {} user={}".format(request.method, request.path, uid)
    return warn_reply(err, 200)
def _is_role_atleast(myrole, rolebase, perm):
    """Check that *myrole* is the same or above *rolebase* and that the
    session permissions meet every entry of *perm*.

    Assumes a simple role model where roles are ordered from lowest to
    highest access level.
    """
    if not rolebase:
        # no role required, but the caller must be logged on
        return "userid" in session

    userperms = session.get("perms") or {}
    perm_levels = {"None": 1, "read": 2, "write": 3, "full": 4}
    ok = True
    for key, wanted in perm.items():
        if key not in userperms:
            return False
        ok = ok and perm_levels[userperms[key]] >= perm_levels[wanted]

    role_levels = {"readonly": 1, "editor": 2, "user": 3, "admin": 4, "superuser": 5}
    try:
        return ok and role_levels[myrole] >= role_levels[rolebase]
    except Exception as e:
        # unknown role name -> deny
        log.error(e)
        return False
class MyJSONEncoder(DefaultJSONProvider):
    """JSON provider that also understands peewee queries, models and
    datetimes."""

    def default(self, obj):
        if isinstance(obj, peewee.SelectBase):
            # materialize lazy peewee queries
            return list(obj)
        if isinstance(obj, db.BaseModel):
            return obj.serialize()
        if isinstance(obj, datetime.datetime):
            return obj.isoformat() if obj else None
        # elif isinstance(obj, sqlite3.Cursor):
        #     return list(obj)
        # if isinstance(obj, psycopg2.extensions.cursor):
        #     return list(obj)
        return DefaultJSONProvider.default(obj)
# install the peewee/datetime-aware JSON provider and bring up logging
# at import time, when this module is first imported by the app
app.json = MyJSONEncoder(app)
init_logging()

33
py/main.py Normal file
View file

@ -0,0 +1,33 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# main.py: server main script
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com thanks to Tomi.Mickelsson@iki.fi (RESTPie3)
# register endpoints
from api import api_account
from api import api_dev
from api import api_sysconfig
from api import api_firmware
from api import api_user_tasks
from api import api_logs
from api import api_scanner
from api import api_backups
from api import api_snippet
try:
    # the pro edition endpoints are optional; ignore when not shipped
    from api import api_pro_api
except ImportError:
    pass

import logging
log = logging.getLogger("main")
log.info("Running! http://localhost:8100")

from libs.webutil import app

if app.testing:
    # attach the interactive werkzeug debugger in test/dev configuration
    import werkzeug.debug
    app.wsgi_app = werkzeug.debug.DebuggedApplication(app.wsgi_app, True)

# uwsgi-daemon takes over the app...

99
py/mules/data_grabber.py Normal file
View file

@ -0,0 +1,99 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# syslog.py: independent worker process for grabbing data of devices
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
import time
from libs import util
from libs.db import db_device,db_sysconfig,db_events
from threading import Thread
from libs.red import RedisDB
import netifaces
import json
import queue
import logging
log = logging.getLogger("Data_grabber")
def grab_device_data(timer=2):
    """Poll every device in parallel and push traffic data into redis.

    Spawns one util.grab_device_data thread per device, collects the
    per-device results from a queue, aggregates rx/tx totals and logs a
    connection event for every failed device.

    *timer* is currently unused.  # NOTE(review): confirm before removing
    """
    all_devices = list(db_device.get_all_device())
    num_threads = len(all_devices)
    q = queue.Queue()
    threads = []
    log.info("Data grabber started")
    for dev in all_devices:
        # small stagger so all devices are not polled at the same instant
        time.sleep(0.2)
        t = Thread(target=util.grab_device_data, args=(dev, q))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    res = []
    totals = {
        'rx-total': 0,
        'tx-total': 0
    }
    data = False
    for _ in range(num_threads):
        qres = q.get()
        if not qres.get("reason", False):
            # success: accumulate totals from this device's payload
            data = qres.get("data", None)
            if data:
                if data.get("rx-total", False):
                    totals['rx-total'] += data["rx-total"]
                if data.get("tx-total", False):
                    totals["tx-total"] += data["tx-total"]
            res.append(qres)
        else:
            # worker reported a failure -> record a critical connection event
            db_events.connection_event(qres['id'], 'Data Puller', qres.get("detail", "connection"), "Critical", 0, qres.get("reason", "problem in data puller"))
    keys = ["rx-total", "tx-total"]
    redopts = {
        "dev_id": 'all',
        "keys": keys
    }
    try:
        # NOTE(review): at this point `data` holds the LAST successful
        # device's payload while `totals` holds the aggregate; storing
        # `data` under dev_id 'all' looks unintended — confirm whether
        # `totals` should be written instead.
        if data:
            reddb = RedisDB(redopts)
            reddb.dev_create_keys()
            reddb.add_dev_data(data)
    except Exception as e:
        log.error(e)
def get_all_ipv4_addresses():
    """Collect the host's non-loopback IPv4 addresses and persist them
    in sysconfig under 'all_ip' when they changed."""
    stored = db_sysconfig.get_sysconfig('all_ip')
    addresses = []
    # walk every network interface and gather its IPv4 addresses
    for interface in netifaces.interfaces():
        for link in netifaces.ifaddresses(interface).get(netifaces.AF_INET, []):
            addr = link['addr']
            if '127.0.0.1' in addr:
                continue  # skip loopback
            addresses.append(addr)
    addresses.sort()
    serialized = json.dumps(addresses)
    # write back only on change to avoid needless db updates
    if stored != serialized:
        db_sysconfig.update_sysconfig('all_ip', serialized)
def main():
    """Run the data-grabber loop forever, once a minute."""
    while True:
        # NOTE(review): the scan-mode value is fetched but never used in
        # this loop; presumably kept for future use — confirm before removing.
        config = db_sysconfig.get_scan_mode().value
        get_all_ipv4_addresses()
        grab_device_data()
        time.sleep(60)
    log.info("data grabbing end")  # unreachable: the loop above never exits

if __name__ == '__main__':
    main()

64
py/mules/firmware.py Normal file
View file

@ -0,0 +1,64 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# syslog.py: independent worker process for updating firmware of incomplate update tasks
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
import time
from libs import util
from libs.db import db_tasks,db_device
import logging
import queue
from threading import Thread
log = logging.getLogger("Firmware")
try:
from libs import utilpro
ISPRO=True
except ImportError:
ISPRO=False
pass
def updater():
    """One firmware-update pass over all devices with a pending install.

    Uses the db task row from firmware_service_status() as a busy flag so
    only one pass runs at a time; spawns one update thread per device and
    waits for all of them.  Always returns False.
    """
    task = db_tasks.firmware_service_status()
    if not task.status:
        log.info("Firmware updater started")
        # mark the service busy (status flag acts as a crude lock)
        task.status = 1
        task.save()
        try:
            # devices with a pending firmware, < 4 failed attempts, and in
            # state 'updated' or 'failed'
            devs = list(db_device.Devices.select().where(db_device.Devices.firmware_to_install.is_null(False) & (db_device.Devices.failed_attempt < 4) & ((db_device.Devices.status == 'updated') | (db_device.Devices.status == 'failed'))))
            num_threads = len(devs)
            q = queue.Queue()
            threads = []
            for dev in devs:
                if ISPRO:
                    t = Thread(target=utilpro.update_device, args=(dev, {"version_to_install": dev.firmware_to_install}, False, q))
                else:
                    t = Thread(target=util.update_device, args=(dev, q))
                t.start()
                threads.append(t)
            for t in threads:
                t.join()
            res = []
            # drain worker results; values are currently unused
            for _ in range(num_threads):
                qres = q.get()
        except Exception as e:
            log.error(e)
            # release the busy flag on failure
            task.status = 0
            task.save()
            return False
        # release the busy flag on success
        task.status = 0
        task.save()
        return False
def main():
    """Run the firmware updater forever, once a minute.

    Errors from a single pass are logged and do not kill the worker.
    """
    while True:
        try:
            updater()
        except Exception:
            # keep the worker alive, but leave a trace instead of the
            # previous bare `except: pass` that silently swallowed errors
            log.exception("unhandled error in firmware updater pass")
        time.sleep(60)

if __name__ == '__main__':
    main()

238
py/mules/radius.py Normal file
View file

@ -0,0 +1,238 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# radius.py: independent worker process as a radius server
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from libs.db.db_device import Devices,EXCLUDED,database
from libs.db import db_sysconfig
import logging
import time
import asyncio
import logging
import traceback
from pyrad.dictionary import Dictionary
from pyrad.server_async import ServerAsync
from pyrad.packet import AccessAccept,AccessReject
from pyrad.server import RemoteHost
from libs.mschap3 import mschap,mppe
from libs.db import db,db_user_group_perm,db_device,db_groups,db_device,db_AA,db_sysconfig
from libs.util import FourcePermToRouter
try:
import uvloop
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except:
pass
log = logging.getLogger("Radius")
# pyrad's own debug output goes to a dedicated file
logging.basicConfig(filename="pyrad.log", level="DEBUG",
                    format="%(asctime)s [%(levelname)-8s] %(message)s")
class RadServer(ServerAsync):
    """Async RADIUS server for Mikrotik devices.

    Handles authentication (MS-CHAPv2), accounting (Start/Stop), and
    logs CoA/disconnect requests.  Device/user data comes from the
    project database modules.
    """

    def __init__(self, loop, dictionary):
        ServerAsync.__init__(self, loop=loop, dictionary=dictionary,
                             debug=True)

    def verifyMsChapV2(self, pkt, userpwd, group, nthash):
        """Verify the MS-CHAPv2 response in *pkt*.

        Returns an AccessAccept reply (optionally carrying a
        Mikrotik-Group attribute when *group* is truthy), or False when
        the response does not match.
        """
        ms_chap_response = pkt['MS-CHAP2-Response'][0]
        authenticator_challenge = pkt['MS-CHAP-Challenge'][0]
        # MS-CHAP2-Response attribute is 50 bytes: peer challenge at
        # [2:18], NT-Response at [26:50]
        if len(ms_chap_response) != 50:
            raise Exception("Invalid MSCHAPV2-Response attribute length")
        nt_response = ms_chap_response[26:50]
        peer_challenge = ms_chap_response[2:18]
        _user_name = pkt.get(1)[0]
        nt_resp = mschap.generate_nt_response_mschap2(
            authenticator_challenge,
            peer_challenge,
            _user_name,
            userpwd,
            nthash
        )
        if nt_resp == nt_response:
            # response matches: build the success authenticator and keys
            auth_resp = mschap.generate_authenticator_response(
                userpwd,
                nt_response,
                peer_challenge,
                authenticator_challenge,
                _user_name,
                nthash
            )
            mppeSendKey, mppeRecvKey = mppe.mppe_chap2_gen_keys(userpwd, nt_response, nthash)
            if group:
                reply = self.CreateReplyPacket(pkt, **{
                    "MS-CHAP2-Success": auth_resp.encode(),
                    "Mikrotik-Group": group,
                })
            else:
                reply = self.CreateReplyPacket(pkt, **{
                    "MS-CHAP2-Success": auth_resp.encode(),
                })
            reply.code = AccessAccept
            return reply
        else:
            return False

    def send_auth_reject(self, protocol, pkt, addr):
        """Send an AccessReject reply for *pkt* back to *addr*."""
        reply = self.CreateReplyPacket(pkt, **{
        })
        reply.code = AccessReject
        reply.error_msg = "User password wrong"
        # log failed attempts
        protocol.send_response(reply, addr)

    def handle_auth_packet(self, protocol, pkt, addr):
        """Handle an Access-Request.

        Looks up the device by NAS-IP and the user by name, optionally
        pushes per-device group permissions to the router
        (force_perms sysconfig), then verifies MS-CHAPv2 and replies.
        """
        # log.error("Attributes: ")
        # for attr in pkt.keys():
        #     log.error("%s: %s" % (attr, pkt[attr]))
        try:
            tz = int(time.time())
            username = pkt['User-Name'][0]
            userip = pkt['Calling-Station-Id'][0]
            devip = pkt['NAS-IP-Address'][0]
            dev = db_device.query_device_by_ip(devip)
            if not dev:
                # unknown device: reject without logging (no dev id to log on)
                self.send_auth_reject(protocol, pkt, addr)
                return
            u = db.get_user_by_username(username)
            if not u:
                self.send_auth_reject(protocol, pkt, addr)
                db_AA.Auth.add_log(dev.id, 'failed', username, userip, by=None, sessionid=None, timestamp=tz, message="User Not Exist")
                return
            else:
                # get user permission related to device
                if not dev:
                    self.send_auth_reject(protocol, pkt, addr)
                    db_AA.Auth.add_log(dev.id, 'failed', username, userip, by=None, sessionid=None, timestamp=tz, message="Device Not Exist")
                    return
                force_perms = True if db_sysconfig.get_sysconfig('force_perms') == "True" else False
                if force_perms:
                    dev_groups = db_groups.devgroups(dev.id)
                    dev_groups_ids = [group.id for group in dev_groups]
                    dev_groups_ids.append(1)
                    res = False
                    if dev and len(dev_groups_ids) > 0:
                        perm = db_user_group_perm.DevUserGroupPermRel.query_permission_by_user_and_device_group(u.id, dev_groups_ids)
                        res2 = False
                        if len(list(perm)) > 0:
                            # push the matched permission group to the router
                            res2 = FourcePermToRouter(dev, perm)
                        if not res2:
                            self.send_auth_reject(protocol, pkt, addr)
                            db_AA.Auth.add_log(dev.id, 'failed', username, userip, by=None, sessionid=None, timestamp=tz, message="Unable to verify group")
                            return
                nthash = u.hash
                # NOTE(review): the literal "password" is passed as userpwd;
                # verification appears to rely on the stored NT hash
                # (nthash) instead — confirm against libs.mschap3.
                if force_perms:
                    reply = self.verifyMsChapV2(pkt, "password", perm[0].perm_id.name, nthash)
                else:
                    reply = self.verifyMsChapV2(pkt, "password", False, nthash)
                if reply:
                    protocol.send_response(reply, addr)
                    return
                db_AA.Auth.add_log(dev.id, 'failed', username, userip, by=None, sessionid=None, timestamp=tz, message="Wrong Password")
                self.send_auth_reject(protocol, pkt, addr)
        except Exception as e:
            print(e)
            self.send_auth_reject(protocol, pkt, addr)
            # log failed attempts

    def handle_acct_packet(self, protocol, pkt, addr):
        """Handle accounting Start/Stop packets by recording login/logout,
        then always acknowledge the packet."""
        try:
            ts = int(time.time())
            dev_ip = pkt['NAS-IP-Address'][0]
            dev = db_device.query_device_by_ip(dev_ip)
            type = pkt['Acct-Status-Type'][0]
            user = pkt['User-Name'][0]
            userip = pkt['Calling-Station-Id'][0]
            sessionid = pkt['Acct-Session-Id'][0]
            if type == 'Start':
                db_AA.Auth.add_log(dev.id, 'loggedin', user, userip, None, timestamp=ts, sessionid=sessionid)
            elif type == 'Stop':
                db_AA.Auth.add_log(dev.id, 'loggedout', user, userip, None, timestamp=ts, sessionid=sessionid)
        except Exception as e:
            log.error("Error in accounting: ")
            log.error(e)
            log.error("Received an accounting request")
            log.error("Attributes: ")
            log.error(pkt.keys())
            # for attr in pkt.keys():
            #     log.error("%s: %s" % (attr, pkt[attr]))
        reply = self.CreateReplyPacket(pkt)
        protocol.send_response(reply, addr)

    def handle_coa_packet(self, protocol, pkt, addr):
        """Log a CoA request's attributes and acknowledge it."""
        log.error("Received an coa request")
        log.error("Attributes: ")
        for attr in pkt.keys():
            log.error("%s: %s" % (attr, pkt[attr]))
        reply = self.CreateReplyPacket(pkt)
        protocol.send_response(reply, addr)

    def handle_disconnect_packet(self, protocol, pkt, addr):
        """Log a disconnect request and reply with CoA-NAK (code 45)."""
        log.error("Received an disconnect request")
        log.error("Attributes: ")
        for attr in pkt.keys():
            log.error("%s: %s" % (attr, pkt[attr]))
        reply = self.CreateReplyPacket(pkt)
        # COA NAK
        reply.code = 45
        protocol.send_response(reply, addr)
def main():
    """Create the RADIUS server, bind to 0.0.0.0 and run forever."""
    # create server and read dictionary
    loop = asyncio.get_event_loop()
    server = RadServer(loop=loop, dictionary=Dictionary('py/libs/raddic/dictionary'))
    # accept requests from any host, using the shared secret from sysconfig
    secret = db_sysconfig.get_sysconfig('rad_secret')
    server.hosts["0.0.0.0"] = RemoteHost("0.0.0.0",
                                         secret.encode(),
                                         "localhost")
    try:
        # Initialize transports (auth + acct enabled, CoA disabled)
        loop.run_until_complete(
            asyncio.ensure_future(
                server.initialize_transports(enable_auth=True,
                                             enable_acct=True,
                                             enable_coa=False,
                                             addresses=['0.0.0.0'])))
        try:
            # start server
            loop.run_forever()
        except KeyboardInterrupt as k:
            pass
        # Close transports
        loop.run_until_complete(asyncio.ensure_future(
            server.deinitialize_transports()))
    except Exception as exc:
        # NOTE(review): log.error('Error: ', exc) passes exc as an extra
        # positional arg with no %s placeholder, so the exception detail
        # is likely dropped — consider log.error('Error: %s', exc).
        log.error('Error: ', exc)
        log.error('\n'.join(traceback.format_exc().splitlines()))
        # Close transports
        loop.run_until_complete(asyncio.ensure_future(
            server.deinitialize_transports()))
    loop.close()

if __name__ == '__main__':
    main()

163
py/mules/syslog.py Normal file
View file

@ -0,0 +1,163 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# syslog.py: independent worker process as a syslog server
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
from math import e
import socketserver
import re
import time
from libs.db import db_device
import logging
from libs.db import db_AA,db_events
log = logging.getLogger("SYSLOG")
from libs import util
try:
from libs import utilpro
ISPRO=True
except ImportError:
ISPRO=False
pass
import socketserver
class SyslogUDPHandler(socketserver.BaseRequestHandler):
    """UDP syslog handler for RouterOS messages.

    Each datagram is matched against the sending device (by source IP
    and the "mikrowizard<devid>" tag in the message), then dispatched on
    its topic prefix to record auth, accounting, link, dhcp and wireless
    events in the database.
    """

    def extract_data_from_regex(self, regex, line):
        """Return a flat list of all capture groups of *regex* found in
        *line*, or None when matching raises."""
        try:
            matches = re.finditer(regex, line, re.MULTILINE)
            sgroups = []
            for matchNum, match in enumerate(matches, start=1):
                for groupNum in range(0, len(match.groups())):
                    groupNum = groupNum + 1
                    sgroups.append(match.group(groupNum))
            return sgroups
        except:
            return None

    def handle(self):
        """Parse one syslog datagram and record the matching event(s)."""
        data = bytes.decode(self.request[0].strip(), encoding="utf-8")
        message = str(data)
        # get current timestamp
        ts = int(time.time())
        socket = self.request[1]
        dev = db_device.query_device_by_ip(self.client_address[0])
        # messages are tagged "mikrowizard<devid>" by the router's logging action
        regex = r'(.*),?(info.*|warning|critical) mikrowizard(\d+):.*'
        if dev:
            info = self.extract_data_from_regex(regex, message)
            opts = util.build_api_options(dev)
            try:
                # sanity check: the device id embedded in the message must
                # match the device found by source IP
                int(info[2])
                if dev and dev.id != int(info[2]):
                    log.error("Device id mismatch ignoring syslog for ip : {}".format(self.client_address[0]))
            except:
                log.error("**device id mismatch")
                log.error(message)
                log.error(self.client_address[0])
                log.error("device id mismatch**")
                dev = False
                pass
            if dev and dev.id == int(info[2]) and 'mikrowizard' in message and 'via api' not in message:
                if 'system,info,account' in message:
                    # login/logout audit lines
                    regex = r"user (.*) logged (in|out) from (..*)via.(.*)"
                    info = self.extract_data_from_regex(regex, message)
                    users = util.get_local_users(opts)
                    try:
                        # classify account as local vs radius
                        if info[0] in users:
                            msg = 'local'
                        else:
                            msg = 'radius'
                        if 'logged in' in message:
                            if 'via api' not in message:
                                db_AA.Auth.add_log(dev.id, 'loggedin', info[0], info[2], info[3], timestamp=ts, message=msg)
                        elif 'logged out' in message:
                            if info[0] in users:
                                db_AA.Auth.add_log(dev.id, 'loggedout', info[0], info[2], info[3], timestamp=ts, message=msg)
                    except Exception as e:
                        log.error(e)
                        log.error(message)
                elif 'system,error,critical' in message:
                    if "login failure" in message:
                        users = util.get_local_users(opts)
                        regex = r"login failure for user (.*) from (..*)via.(.*)"
                        info = self.extract_data_from_regex(regex, message)
                        ts = int(time.time())
                        if info[0] in users:
                            msg = 'local'
                        else:
                            msg = 'radius'
                        db_AA.Auth.add_log(dev.id, 'failed', info[0], info[1], info[2], timestamp=ts, message=msg)
                    elif "rebooted" in message:
                        regex = r'system,error,critical mikrowizard\d+: (.*)'
                        info = self.extract_data_from_regex(regex, message)
                        db_events.state_event(dev.id, "syslog", "Unexpected Reboot", "Critical", 1, info[0])
                elif 'system,info mikrowizard' in message:
                    # configuration change audit: who changed what, via which client
                    regex = r"system,info mikrowizard\d+: (.*) (changed|added|removed|unscheduled) by (winbox-\d.{1,3}\d\/.*\(winbox\)|mac-msg\(winbox\)|tcp-msg\(winbox\)|ssh|telnet|api|api-ssl|.*\/web|ftp|www-ssl).*:(.*)@(.*) \((.*)\)"
                    if re.match(regex, message):
                        info = self.extract_data_from_regex(regex, message)
                        address = info[4].split('/')
                        # derive a human-readable connection type from the client id
                        ctype = ''
                        if 'winbox' in info[2]:
                            ctype = 'winbox'
                            if 'tcp' in info[2]:
                                ctype = 'winbox-tcp'
                            elif 'mac' in info[2]:
                                ctype = 'winbox-mac'
                            if 'terminal' in address:
                                ctype += '/terminal'
                        elif 'ssh' in info[2]:
                            ctype = 'ssh'
                        elif 'telnet' in info[2]:
                            ctype = 'telnet'
                        elif '/web' in info[2]:
                            ctype = info[2].split('/')[1] + " " + "({})".format(info[2].split('/')[0])
                        elif 'api' in info[2]:
                            ctype = 'api'
                        db_AA.Account.add_log(dev.id, info[0], info[1], info[3], message, ctype, address[0], info[5])
                    elif "rebooted" in message:
                        db_events.state_event(dev.id, "syslog", "Router Rebooted", "info", 1, info[0])
                    else:
                        # fallback: change line without a recognizable client id
                        regex = r"system,info mikrowizard\d+: (.*) (changed|added|removed|unscheduled) by (.*)"
                        info = self.extract_data_from_regex(regex, message)
                        db_AA.Account.add_log(dev.id, info[0], info[1], info[2], message)
                elif 'interface,info mikrowizard' in message:
                    link_regex = r"interface,info mikrowizard\d+: (.*) link (down|up).*"
                    events = list(db_events.get_events_by_src_and_status("syslog", 0, dev.id).dicts())
                    if "link down" in message:
                        info = self.extract_data_from_regex(link_regex, message)
                        db_events.state_event(dev.id, "syslog", "Link Down: " + info[0], "Warning", 0, "Link is down for {}".format(info[0]))
                    elif "link up" in message:
                        # link restored: close the matching open "Link Down" event
                        info = self.extract_data_from_regex(link_regex, message)
                        util.check_or_fix_event(events, 'state', "Link Down: " + info[0])
                elif "dhcp,info mikrowizard" in message:
                    dhcp_regex = r'dhcp,info mikrowizard\d+: (dhcp-client|.*) (deassigned|assigned|.*) (\d+\.\d+\.\d+\.\d+|on.*address)\s*(from|to|$)\s*(.*)'
                    info = self.extract_data_from_regex(dhcp_regex, message)
                    # NOTE(review): "assigned" is a substring of "deassigned",
                    # so deassign messages are caught by the first branch —
                    # confirm whether the branch order is intended.
                    if info and "assigned" in message:
                        db_events.state_event(dev.id, "syslog", "dhcp assigned", "info", 1, "server {} assigned {} to {}".format(info[0], info[2], info[4]))
                    elif info and "deassigned" in message:
                        db_events.state_event(dev.id, "syslog", "dhcp deassigned", "info", 1, "server {} deassigned {} from {}".format(info[0], info[2], info[4]))
                    elif info and "dhcp-client" in message:
                        db_events.state_event(dev.id, "syslog", "dhcp client", "info", 1, "{} {}".format(info[1], info[2]))
                elif "wireless,info mikrowizard" in message:
                    if ISPRO:
                        # pro edition has its own wireless event handling
                        utilpro.wireless_syslog_event(dev, message)
                    else:
                        regex = r'wireless,info mikrowizard\d+: ([0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2}:[0-9A-Fa-f]{2})@(.*): (connected|disconnected), (signal strength|.*)? (-?\d{2})?.*'
                        info = self.extract_data_from_regex(regex, message)
                        if info:
                            strength = ""
                            if len(info) > 4:
                                strength = info[4]
                            db_events.state_event(dev.id, "syslog", "wireless client", "info", 1, "{} {} {} {} {}".format(info[0], info[1], info[2], info[3], strength))
                            log.error(len(info))
                            log.error(message)
        else:
            # datagram from an unknown source IP
            log.error(message)
if __name__ == "__main__":
    try:
        # listen for RouterOS syslog messages on UDP port 5014
        server = socketserver.UDPServer(("0.0.0.0", 5014), SyslogUDPHandler)
        server.serve_forever(poll_interval=0.5)
    except (IOError, SystemExit):
        raise

159
py/mules/updater.py Normal file
View file

@ -0,0 +1,159 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# updater.py: independent worker process for updating MikroWizard to latest version
# MikroWizard.com , Mikrotik router management solution
# Author: sepehr.ha@gmail.com
import time
import datetime
from libs import util
from pathlib import Path
from libs.db import db_sysconfig
import requests
import logging
import os
import hashlib
import zipfile
import subprocess
log = logging.getLogger("Updater_mule")
def set_get_install_date():
    """Return the installation date as a "%Y-%m-%d %H:%M:%S" string.

    Reads 'install_date' from sysconfig; on first run it stores the
    current time and returns it.  Always returns a string: previously
    the first-run path returned a datetime object, which made the
    caller's datetime.strptime(install_date, ...) raise TypeError.
    """
    install_date = False
    try:
        install_date = db_sysconfig.get_sysconfig('install_date')
    except Exception:
        # key not present yet (first run)
        pass
    if not install_date:
        install_date = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        db_sysconfig.set_sysconfig('install_date', install_date)
    return install_date
# Example usage
def check_sha256(filename, expect):
    """Check if the file with the name "filename" matches the SHA-256
    sum in "expect".

    A missing or unreadable file simply yields False.
    """
    digest = hashlib.sha256()
    try:
        with open(filename, 'rb') as fh:
            # hash in 4K chunks so a large file does not consume much memory
            for chunk in iter(lambda: fh.read(4096), b""):
                digest.update(chunk)
        return expect == digest.hexdigest()
    except Exception as e:
        return False
def extract_zip_reload(filename, dst):
    """Extract the contents of the zip file "filename" to the directory
    "dst", run db migrations, delete the zip and touch /app/reload so
    the uwsgi server reloads the updated modules."""
    with zipfile.ZipFile(filename, 'r') as zip_ref:
        zip_ref.extractall(dst)
    # run db migrate against the freshly extracted sources
    dir = "/app/"
    cmd = "cd {}; PYTHONPATH={}py PYSRV_CONFIG_PATH={} python3 scripts/dbmigrate.py".format(dir, dir, "/conf/server-conf.json")
    p = subprocess.Popen(cmd, shell=True)
    (output, err) = p.communicate()
    # This makes the wait possible
    p_status = p.wait()
    # remove the update archive, then touch the server reload file /app/reload
    os.remove(filename)
    Path('/app/reload').touch()
def main():
    """Hourly update-check loop.

    Builds a hardware id from the network interfaces plus the install
    date, asks mikrowizard.com whether an update exists for this
    username/version, downloads the update, verifies its sha256 and
    extracts it (which also triggers a server reload).
    """
    while True:
        # align each pass to the top of the next hour
        next_hour = (time.time() // 3600 + 1) * 3600
        sleep_time = next_hour - time.time()
        # Code to be executed every hour
        print("Running hourly Update checker ...")
        interfaces = util.get_ethernet_wifi_interfaces()
        hwid = util.generate_serial_number(interfaces)
        username = False
        try:
            username = db_sysconfig.get_sysconfig('username')
        except:
            log.error("No username found")
            time.sleep(sleep_time)
            continue
        # util.send_mikrowizard_request(params)
        if not username or username.strip() == "":
            log.error("No username found")
            time.sleep(sleep_time)
            continue
        install_date = set_get_install_date()
        from _version import __version__
        # convert install_date string "%Y-%m-%d %H:%M:%S" to "%Y%m%d"
        install_date = datetime.datetime.strptime(install_date, "%Y-%m-%d %H:%M:%S").strftime("%Y%m%d")
        # append the install date to the serial number
        hwid += "-" + install_date
        params = {
            "serial_number": hwid,
            "username": username.strip(),
            "version": __version__
        }
        res = False
        url = "http://mikrowizard.com/wp-json/mikrowizard/v1/get_update"
        # send post request to server mikrowizard.com with params in json
        try:
            response = requests.post(url, json=params)
            res = response
        except:
            time.sleep(sleep_time)
            continue
        # get response from server
        try:
            if res and res.status_code == 200:
                res = res.json()
                if 'token' in res:
                    params = {
                        "token": res['token'],
                        "file_name": res['filename'],
                        "username": username.strip()
                    }
                    log.info("Update available/Downloading...")
                else:
                    # no update offered this round
                    time.sleep(sleep_time)
                    continue
        except Exception as e:
            log.error(e)
            # NOTE(review): execution continues below after this failure,
            # where res may still be a Response object (or False), so
            # res['filename'] can raise — confirm a `continue` is not
            # missing here.
        # check if filename exist in /app/ and checksum is same then dont continue
        if check_sha256("/app/" + res['filename'], res['sha256']):
            log.error("Checksum match, File exist")
            extract_zip_reload("/app/" + res['filename'], "/app/")
            time.sleep(sleep_time)
            continue
        download_url = "http://mikrowizard.com/wp-json/mikrowizard/v1/download_update"
        # send post request to server mikrowizard.com with params in json
        r = requests.post(download_url, json=params, stream=True)
        if "invalid" in r.text or r.text == 'false':
            log.error("Invalid response")
            time.sleep(sleep_time)
            continue
        # stream the archive to disk
        with open("/app/" + res['filename'], 'wb') as fd:
            for chunk in r.iter_content(chunk_size=128):
                fd.write(chunk)
        if check_sha256("/app/" + res['filename'], res['sha256']):
            log.error("Update downloaded : " + "/app/" + res['filename'])
            extract_zip_reload("/app/" + res['filename'], "/app/")
        else:
            # corrupt download: discard and retry next hour
            log.error("Checksum not match")
            os.remove("/app/" + res['filename'])
        time.sleep(sleep_time)

if __name__ == '__main__':
    main()

107
py/task_run.py Normal file
View file

@ -0,0 +1,107 @@
import re
from flask import request, jsonify, g
from playhouse.shortcuts import dict_to_model, update_model_from_dict
import os
os.environ["PYSRV_CONFIG_PATH"] = "/conf/server-conf.json"
import sys
# from libs.db import db_device
# from libs.db import db_groups
# from libs.db import db_tasks
from libs import util
from libs.db import db_user_tasks
from libs.webutil import app, login_required, get_myself,buildResponse
from functools import reduce
import logging
from cron_descriptor import get_description
import queue
from threading import Thread
try:
from libs import utilpro
ISPRO=True
except ImportError:
ISPRO=False
pass
log = logging.getLogger("api.usertasks")
def backup_devs(devices):
    """Back up *devices* in parallel and return the per-device results.

    Each queue entry produced by util.backup_routers is a dict with at
    least {'id': <device id>, 'state': <bool>} (see the sample at the
    bottom of this file).
    """
    q = queue.Queue()
    threads = []
    for dev in devices:
        t = Thread(target=util.backup_routers, args=(dev, q))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()

    # map ids back to device objects so a failure alert names the device
    # the result actually belongs to (previously the alert always used
    # the last loop variable `dev`)
    devs_by_id = {dev.id: dev for dev in devices}
    res = []
    for _ in range(len(devices)):
        qres = q.get()
        if not qres['state']:
            util.log_alert('backup', devs_by_id.get(qres.get('id')), 'Backup failed')
        res.append(qres)
    return res
def run_snippets(devices, snippet):
    """Run *snippet* on every device in *devices* in parallel and return
    the list of per-device result dicts collected from the queue."""
    num_threads = len(devices)
    q = queue.Queue()
    threads = []
    for dev in devices:
        t = Thread(target=util.run_snippets, args=(dev, snippet, q))
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    res = []
    for _ in range(num_threads):
        qres = q.get()
        if 'result' in qres and not qres['result']:
            # NOTE(review): `dev` here is the last loop variable, not the
            # device this queue entry belongs to — the alert may name the
            # wrong device; consider mapping qres['id'] back to its device.
            util.log_alert('run_snippet', dev, 'Run Snippet failed')
        res.append(qres)
    return res
if __name__ == '__main__':
    # quick adhoc tests
    logging.basicConfig(level=logging.DEBUG)
    # the task id arrives as the first CLI argument
    taksid = sys.argv[1]
    # check if taskid is int
    if not taksid.isdigit():
        print("Wrong Task ID")
        exit()
    utask = db_user_tasks.UserTasks.get_utask_by_id(taksid)
    if not utask:
        log.error("No task with this id {}".format(taksid))
        exit()
    # Get user task from db by id, then resolve its target devices
    devices = []
    devices = db_user_tasks.get_task_devices(utask)
    # if task.selection_type == "devices":
    #     devids=task.dev_ids.split(",")
    #     devices=list(db_device.get_devices_by_id2(devids))
    # else:
    #     for group in task.dev_groups.split(","):
    #         if not group.isdigit():
    #             continue
    #         devices=db_groups.devs2(group)
    # task=utaks.select().where(utaks.id == taksid).get()
    # dispatch on the task type
    if utask.task_type == "backup":
        log.error("TASK TYPE BACKUP")
        res = backup_devs(devices=devices)
    elif utask.task_type == "snippet":
        log.error("TASK TYPE SNIPPET")
        snippet = utask.snippetid.content
        if not snippet:
            log.error("no snippet")
        else:
            res = run_snippets(devices=devices, snippet=snippet)
    elif utask.task_type == "firmware":
        log.error("firmware update")
        if not ISPRO:
            # firmware tasks are a pro-edition feature
            exit()
        res = utilpro.run_firmware_task(utask)
    # log.error(res)
    # [{'id': 3, 'state': False}, {'id': 1, 'state': False}, {'id': 2, 'state': True}]

28
reqs.txt Normal file
View file

@ -0,0 +1,28 @@
Flask==2.3.2
Flask-Session2==1.3.1
passlib==1.7.4
peewee==3.16.2
peewee-migrate==1.7.1
psycopg2-binary==2.9.6
pytz==2022.7.1
redis==4.5.4
uwsgidecorators==1.1.0
beautifulsoup4==4.10.0
#chardet==4.0.0
click==8.1.7
#click==8.0.3
cron_descriptor==1.4.0
cron_validator==1.0.8
cryptography==3.4.8
feedparser==6.0.11
librouteros==3.2.1
nagiosplugin==1.3.3
paramiko==2.9.3
pexpect==4.9.0
pycryptodome==3.20.0
pyrad==2.4
python_crontab==3.0.0
Requests==2.32.2
#setuptools==59.6.0
uvloop==0.19.0
netifaces==0.11.0

14
rsync.sh Executable file
View file

@ -0,0 +1,14 @@
#!/bin/sh
# rsync files to server and reload
# HOST: replace with your real data
HOST='pi@192.168.100.10'

echo "RSYNCING in 3secs..."
sleep 3

# copy everything except VCS/caches/artifacts into the server's /app/
rsync -av --exclude '.git' --exclude '__pycache__' --exclude '*.pyc' --exclude '*.sqlite' * $HOST:/app/

# ask python server to reload sources
ssh $HOST touch /app/VERSION

1
run-docker.sh Executable file
View file

@ -0,0 +1 @@
# run the dev image on the host network, mounting config, migrations and
# sources from the host; uwsgi reloads whenever /app/reload is touched
sudo docker run --rm -it --net host --name mikroman-dev --add-host=host.docker.internal:host-gateway -v /opt/mikrowizard/:/conf/ -v /app/migrations/:/app/migrations/ -v /app/py:/app/py -v /app/firms:/app/firms -v /app/reload:/app/reload mikroman:latest uwsgi --ini /app/conf/uwsgi.ini:uwsgi-docker-dev --touch-reload=/app/reload

1
run-server.sh Executable file
View file

@ -0,0 +1 @@
# start uwsgi with the production profile from the shared ini file
sudo /usr/local/bin/uwsgi --ini /app/conf/uwsgi.ini:uwsgi-production

5
run.sh Executable file
View file

@ -0,0 +1,5 @@
#!/bin/sh
# run in dev mode, with the working tree mounted into the container
docker run -it --rm --name restpie-dev -p 8100:80 -v `pwd`/:/app/ restpie-dev-image

33
scripts/dbmigrate.py Normal file
View file

@ -0,0 +1,33 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
# dbmigrate.py: migrate the local database
# - run either on dev machine or at server
#
# Author: Tomi.Mickelsson@iki.fi
import os
import config
# Build the pw_migrate command for the configured database backend.
if config.DATABASE_HOST.startswith("/"):
    # sqlite
    # note: can't use full path here!
    # db will appear in "/app/data/mydb.sqlite" (mapped volume locally)
    cmd = "pw_migrate migrate --directory=/app/migrations_sqlite --database=sqlite:/data/mydb.sqlite"
    shown_cmd = cmd
else:
    # postgresql
    cmd = "pw_migrate migrate --database=postgresql://{}:{}@{}:{}/{}".format(
        config.DATABASE_USER,
        config.DATABASE_PASSWORD,
        config.DATABASE_HOST,
        config.DATABASE_PORT,
        config.DATABASE_NAME)
    # do not leak the database password into stdout/log files
    shown_cmd = cmd.replace(config.DATABASE_PASSWORD, "****")

print(shown_cmd)

ret = os.system(cmd)
if ret:
    print("migrate ERROR", ret)
else:
    print("migrate OK")

5
shell.sh Executable file
View file

@ -0,0 +1,5 @@
#!/bin/sh
# run interactive shell inside docker instance (the running mikroman container)
docker exec -it mikroman bash -l

1
templates/auth.html Normal file
View file

@ -0,0 +1 @@

1
templates/example.html Normal file
View file

@ -0,0 +1 @@