global-functions: $FetchHuge: another workaround for complete file
Turns out the workaround in $WaitForFile (commit 8231c3e833) is not
sufficient. It helps sometimes, but not always. Possibly it depends on
CPU speed and bandwidth of the internet connection... Who knows!? 🤪
But! Reading the file goes beyond the known file size. That's suspicious
and indicates exactly this issue. So add a delay, and keep reading until
the sizes are equal.
parent 3ada3055ff
commit 191cc1b952

1 changed file with 5 additions and 1 deletion
--- a/global-functions
+++ b/global-functions
@@ -479,9 +479,13 @@
   :local FileSize [ /file/get $FileName size ];
   :local Return "";
   :local VarSize 0;
-  :while ($VarSize < $FileSize) do={
+  :while ($VarSize != $FileSize) do={
     :set Return ($Return . ([ /file/read offset=$VarSize chunk-size=32768 file=$FileName as-value ]->"data"));
+    :set FileSize [ /file/get $FileName size ];
     :set VarSize [ :len $Return ];
+    :if ($VarSize > $FileSize) do={
+      :delay 100ms;
+    }
   }
   /file/remove $DirName;
   :return $Return;
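
For readability, here is the patched read loop assembled in one piece.
This is a sketch using the names from the diff; the surrounding
$FetchHuge code that sets $FileName and $DirName is assumed from
context:

  :local FileSize [ /file/get $FileName size ];
  :local Return "";
  :local VarSize 0;

  # keep reading until exactly $FileSize bytes have been collected
  :while ($VarSize != $FileSize) do={
    :set Return ($Return . ([ /file/read offset=$VarSize chunk-size=32768 file=$FileName as-value ]->"data"));
    # the download may still be in flight, so refresh the known size
    :set FileSize [ /file/get $FileName size ];
    :set VarSize [ :len $Return ];
    # reading beyond the known size means the file is not complete
    # yet, so give the download a moment to catch up
    :if ($VarSize > $FileSize) do={
      :delay 100ms;
    }
  }

The switch from < to != is what makes the delay effective: a read can
return data past the size known so far, and with < the loop would stop
right there with an incomplete file; with != and a refreshed $FileSize
it keeps polling until both values match.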
|
|