From ccf17a438cd10f77f0f0c0cd83f6f6729b6479c7 Mon Sep 17 00:00:00 2001
From: Christian Hesse
Date: Thu, 23 Jan 2025 23:07:38 +0100
Subject: [PATCH] global-config: download scripts from rsc.eworm.de
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Currently AI bots are crawling websites all around the world. For a
website hosting git content this adds a lot of extra load and traffic:
The site has lots of sections, repositories have a lot of files,
branches, tags, commit ids, etc... Multiply that and you have a nearly
unlimited number of unique urls. The bots try to get each and every one
of these.

To speed up the learning process on their side a swarm of hundreds,
thousands or more IP addresses is active at the same time, ultimately
DDoS'ing the websites and making them inaccessible. 😳🤬

Well, there is one single file all of these AI bots are not interested
in: robots.txt 🤬🤬

On top of that, some use random user agent strings, making filtering
impossible. 🤬🤬🤬

As a short-term solution I deploy the repository content as static
files, hopefully keeping these accessible at least. We will see.
---
 global-config.rsc | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/global-config.rsc b/global-config.rsc
index c63283d..3e17320 100644
--- a/global-config.rsc
+++ b/global-config.rsc
@@ -217,14 +217,16 @@
 :global GpsTrackUrl "https://example.com/index.php";
 
 # This is the base url to fetch scripts from.
-:global ScriptUpdatesBaseUrl "https://git.eworm.de/cgit/routeros-scripts/plain/";
+:global ScriptUpdatesBaseUrl "https://rsc.eworm.de/main/";
 # alternative urls - main: stable code - next: currently in development
+#:global ScriptUpdatesBaseUrl "https://rsc.eworm.de/next/";
+#:global ScriptUpdatesBaseUrl "https://git.eworm.de/cgit/routeros-scripts/plain/";
 #:global ScriptUpdatesBaseUrl "https://raw.githubusercontent.com/eworm-de/routeros-scripts/main/";
 #:global ScriptUpdatesBaseUrl "https://raw.githubusercontent.com/eworm-de/routeros-scripts/next/";
 #:global ScriptUpdatesBaseUrl "https://gitlab.com/eworm-de/routeros-scripts/raw/main/";
 #:global ScriptUpdatesBaseUrl "https://gitlab.com/eworm-de/routeros-scripts/raw/next/";
 :global ScriptUpdatesUrlSuffix "";
-# use next branch with default url (git.eworm.de)
+# use next branch with my git url (git.eworm.de)
 #:global ScriptUpdatesUrlSuffix "?h=next";
 
 # Use this for defaults with $ScriptRunOnce
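
For a device that already runs routeros-scripts, a minimal sketch of switching
an existing installation over to the new base url could look like the lines
below. It assumes the usual pattern of overriding globals from this repository
(for example in a global-config-overlay script) and that the
$ScriptInstallUpdate helper from global-functions is installed; adjust to your
own setup.

 # sketch (assumption): point the installation at the new static mirror,
 # "main" is stable code, "next" is currently in development
 :global ScriptUpdatesBaseUrl "https://rsc.eworm.de/main/";
 :global ScriptUpdatesUrlSuffix "";

 # re-fetch the installed scripts from the new base url, assuming the
 # $ScriptInstallUpdate function from global-functions is available
 :global ScriptInstallUpdate;
 $ScriptInstallUpdate;

For a permanent change the two :global assignments belong in the
global-config-overlay, so they survive a reboot.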