Mirror of https://git.eworm.de/cgit/routeros-scripts, synced 2025-06-21 09:25:42 +02:00
global-config: download scripts from rsc.eworm.de
Currently AI bots are crawling websites all around the world. For a website hosting git content this adds a lot of extra load and traffic: the site has lots of sections, repositories have lots of files, branches, tags, commit ids, etc. Multiply that and you get a nearly unlimited number of unique URLs, and the bots try to fetch each and every one of them. To speed up the learning process on their side, a swarm of hundreds, thousands or more IP addresses is active at the same time, ultimately DDoS'ing the websites and making them inaccessible. 😳🤬

Well, there is one single file all of these AI bots are not interested in: robots.txt 🤬🤬 On top of that some use random user agent strings, making filtering impossible. 🤬🤬🤬

As a short-term solution I deploy the repository content as static files, hopefully keeping it accessible at least. We will see.
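For reference, this is roughly what well-behaved crawlers are expected to honor; the rules actually served on git.eworm.de are not part of this commit, so treat this as a generic sketch only:

# generic robots.txt example - not the actual rules deployed on git.eworm.de
User-agent: *
Disallow: /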
parent e8b1e19b28
commit ccf17a438c
1 changed file with 4 additions and 2 deletions
@@ -217,14 +217,16 @@
 :global GpsTrackUrl "https://example.com/index.php";
 
 # This is the base url to fetch scripts from.
-:global ScriptUpdatesBaseUrl "https://git.eworm.de/cgit/routeros-scripts/plain/";
+:global ScriptUpdatesBaseUrl "https://rsc.eworm.de/main/";
 # alternative urls - main: stable code - next: currently in development
+#:global ScriptUpdatesBaseUrl "https://rsc.eworm.de/next/";
+#:global ScriptUpdatesBaseUrl "https://git.eworm.de/cgit/routeros-scripts/plain/";
 #:global ScriptUpdatesBaseUrl "https://raw.githubusercontent.com/eworm-de/routeros-scripts/main/";
 #:global ScriptUpdatesBaseUrl "https://raw.githubusercontent.com/eworm-de/routeros-scripts/next/";
 #:global ScriptUpdatesBaseUrl "https://gitlab.com/eworm-de/routeros-scripts/raw/main/";
 #:global ScriptUpdatesBaseUrl "https://gitlab.com/eworm-de/routeros-scripts/raw/next/";
 :global ScriptUpdatesUrlSuffix "";
-# use next branch with default url (git.eworm.de)
+# use next branch with my git url (git.eworm.de)
 #:global ScriptUpdatesUrlSuffix "?h=next";
 
 # Use this for defaults with $ScriptRunOnce
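On an already installed router the new default does not apply by itself, since global-config only ships the defaults. A minimal sketch of switching an existing installation over to the new URL, assuming the routeros-scripts convention of overriding defaults in global-config-overlay and the project's $ScriptInstallUpdate function being available (neither is shown in this commit):

# in global-config-overlay (assumed override mechanism; variable names taken from the diff above)
:global ScriptUpdatesBaseUrl "https://rsc.eworm.de/main/";
:global ScriptUpdatesUrlSuffix "";

# then re-fetch the scripts from the new location
:global ScriptInstallUpdate;
$ScriptInstallUpdate;

The suffix stays empty because the branch is already encoded in the rsc.eworm.de path (main or next), whereas the old cgit URL needed the ?h=next suffix to select the next branch.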