[squid-users] MS update woes
Alex Samad
alex at samad.com.au
Mon Jan 18 00:58:37 UTC 2016
Hi
So I have this in place now. This works well for delaying, YAY!
#
# Delay Pools
# http://wiki.squid-cache.org/Features/DelayPools
# http://www.serverwatch.com/tutorials/article.php/3357241/Reining-in-Bandwidth-With-Squid-Proxying.htm
delay_pools 1
delay_class 1 1
# 10 Mb/s fill (restore) rate, 20 Mb bucket (burst) size
# 10485760/8 = 1310720
# 20971520/8 = 2621440
delay_parameters 1 1310720/2621440
# What to delay
acl Delay_ALL src all
acl Delay_Domain dstdomain -i "/etc/squid/lists/delayDom.lst"
delay_access 1 deny DMZSRV
delay_access 1 allow Delay_Domain
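For anyone reading along: delay_access rules are evaluated top-down and the first match wins, so the DMZSRV machines (that ACL is defined earlier in my config) are exempted before the domain list is consulted. The same logic with the default spelled out explicitly, which I find easier to read:

# first match wins, evaluated top-down
delay_access 1 deny DMZSRV          # exempt the DMZ servers entirely
delay_access 1 allow Delay_Domain   # throttle only the listed domains
delay_access 1 deny all             # everything else bypasses the pool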
But this next part doesn't seem to be working:
# ####
# MS Windows Update ACLs
# ####
acl windowsupdate_url url_regex -i microsoft.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip)
acl windowsupdate_url url_regex -i windowsupdate.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip)
acl windowsupdate_url url_regex -i windows.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip)
# http://wiki.squid-cache.org/SquidFaq/WindowsUpdate
# 800 MB for the big MS SQL patch files
# (range_offset_limit takes an ACL list since Squid 3.2; maximum_object_size does not)
range_offset_limit 800 MB windowsupdate_url
maximum_object_size 800 MB
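In case the reasoning isn't obvious: the update clients fetch with HTTP Range requests, and range_offset_limit is what makes Squid go back and fetch the whole object so it can be cached; maximum_object_size just has to be big enough to admit the result. If I read the docs right, a value of none would remove the cap entirely, which is why scoping it with the ACL seems safer:

# hypothetical alternative -- no cap at all, but only for the update URLs:
#range_offset_limit none windowsupdate_url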
# http://www.squid-cache.org/Versions/v3/3.5/cfgman/quick_abort_min.html
# If you want retrievals to always continue if they are being
# cached, set 'quick_abort_min' to '-1 KB'.
quick_abort_min -1 KB
## range_offset_limit is scoped to just the MS URLs,
## so quick abort could be set back to normal:
#quick_abort_min 16 KB
#quick_abort_max 1024 KB
#quick_abort_pct 95
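As I understand the quick_abort logic with those normal defaults: when a client disconnects mid-transfer, Squid finishes the fetch if less than quick_abort_min KB remain, aborts if more than quick_abort_max KB remain, and otherwise finishes only if quick_abort_pct of the object has already arrived. A worked example:

# 1000 KB object, client aborts after 970 KB received:
#   30 KB remaining  > quick_abort_min (16 KB)    -> not finished by rule 1
#   30 KB remaining  < quick_abort_max (1024 KB)  -> not aborted by rule 2
#   97% transferred  > quick_abort_pct (95%)      -> Squid completes the fetch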
# These lines cache all .cab, .exe, .msi, .msu, .msf, .asf, .psf, .wma, ... .zip
# files from microsoft.com (and the other two domains below), with a cache
# lifetime of 4320 minutes (3 days) to 43200 minutes (30 days).
# Each downloaded object is added to the cache; whenever a request arrives
# indicating the cached copy must not be used, it is converted into an
# If-Modified-Since check instead of a full reload.
# Changed the max from 30 days to 90 days (129600 minutes):
#refresh_pattern -i microsoft.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 43200 reload-into-ims
#refresh_pattern -i windowsupdate.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 43200 reload-into-ims
#refresh_pattern -i windows.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 43200 reload-into-ims
refresh_pattern -i microsoft.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 129600 reload-into-ims
refresh_pattern -i windowsupdate.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 129600 reload-into-ims
refresh_pattern -i windows.com/.*\.(cab|exe|ms[iuf]|[ap]sf|wm[va]|dat|zip) 4320 80% 129600 reload-into-ims
# Add any of your own refresh_pattern entries above these.
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
refresh_pattern . 0 20% 4320
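To decode the refresh_pattern fields for anyone unfamiliar: the syntax is refresh_pattern [-i] regex min percent max [options], with min and max in minutes; percent is the LM-factor, i.e. the object stays fresh while its age is below that fraction of the interval between its Last-Modified time and when it was cached. reload-into-ims rewrites client no-cache/reload requests into If-Modified-Since revalidations. So for the active MS lines above:

# refresh_pattern <regex> <min> <percent> <max> reload-into-ims
#   min 4320    -> fresh for at least 3 days, regardless of headers
#   80%         -> then fresh while age < 80% of (cached time - Last-Modified)
#   max 129600  -> never considered fresh beyond 90 days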
I have turned the following on to stop every machine except my test machine from downloading from those sites.
# ####
# Blockers
# Off by default
# ####
# off by default; currently enabled so only DMZSRV can fetch the update URLs
http_access deny !DMZSRV windowsupdate_url
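One note on how that line reads, since the negation trips people up: all ACLs on a single http_access line are ANDed, so it only denies when both conditions hold:

# deny if (NOT from DMZSRV) AND (URL matches windowsupdate_url)
http_access deny !DMZSRV windowsupdate_url

Requests from DMZSRV, or to non-update URLs, fall through to the later http_access rules.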
It seems like it's not caching again.
So I was wondering: is there a way in the ACLs to allow some machines to access these URLs only when the objects are already cached, while other machines can pull them down from the internet?
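While writing this up I spotted miss_access in the docs, which looks like it might express exactly that. Untested sketch, where DMZSRV stands in for whichever machines should be allowed to fetch from the internet:

# hits are always served; miss_access only controls who may cause a MISS
miss_access allow DMZSRV
miss_access deny windowsupdate_url
miss_access allow all

Everyone outside DMZSRV would then get the update files only when they are already in cache (a miss returns an error to them). Does that sound right, or is there a better way?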
Alex