cache-control volflag; closes #964
@@ -1782,6 +1782,8 @@ notes:
 * `:c,magic` enables filetype detection for nameless uploads, same as `--magic`
   * needs https://pypi.org/project/python-magic/ `python3 -m pip install --user -U python-magic`
     * on windows grab this instead `python3 -m pip install --user -U python-magic-bin`
+* `cachectl` changes how webbrowser will cache responses (the `Cache-Control` response-header); default is `no-cache` which will prevent repeated downloading of the same file unless necessary (browser will ask copyparty if the file has changed)
+  * adding `?cache` to a link will override this with "fully cache this for 69 seconds"; `?cache=321` is 321 seconds, and `?cache=i` is 7 days


 ## database location
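
The quickest way to see the behavior described above is to compare the Cache-Control response header with and without the ?cache override. A minimal sketch using the requests library, assuming a copyparty instance on http://127.0.0.1:3923 and a file at /pub/logo.png (both are placeholders):

import requests  # third-party: python3 -m pip install --user -U requests

url = "http://127.0.0.1:3923/pub/logo.png"  # hypothetical server + file

# default: the volume's cachectl value ("no-cache" unless configured otherwise)
r = requests.get(url)
print(r.headers.get("Cache-Control"))

# adding ?cache=321 overrides that with "cache this for 321 seconds"
r = requests.get(url, params={"cache": "321"})
print(r.headers.get("Cache-Control"))
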
@@ -1281,6 +1281,7 @@ def add_network(ap):
     ap2.add_argument("--xff-src", metavar="CIDR", type=u, default="127.0.0.0/8, ::1/128", help="list of trusted reverse-proxy CIDRs (comma-separated); only accept the real-ip header (\033[33m--xff-hdr\033[0m) and IdP headers if the incoming connection is from an IP within either of these subnets. Specify [\033[32mlan\033[0m] to allow all LAN / private / non-internet IPs. Can be disabled with [\033[32many\033[0m] if you are behind cloudflare (or similar) and are using \033[32m--xff-hdr=cf-connecting-ip\033[0m (or similar)")
     ap2.add_argument("--ipa", metavar="CIDR", type=u, default="", help="only accept connections from IP-addresses inside \033[33mCIDR\033[0m (comma-separated); examples: [\033[32mlan\033[0m] or [\033[32m10.89.0.0/16, 192.168.33.0/24\033[0m]")
     ap2.add_argument("--rp-loc", metavar="PATH", type=u, default="", help="if reverse-proxying on a location instead of a dedicated domain/subdomain, provide the base location here; example: [\033[32m/foo/bar\033[0m]")
+    ap2.add_argument("--cachectl", metavar="TXT", default="no-cache", help="default-value of the 'Cache-Control' response-header (controls caching in webbrowsers). Default prevents repeated downloading of the same file unless necessary (browser will ask copyparty if the file has changed). Examples: [\033[32mmax-age=604869\033[0m] will cache for 7 days, [\033[32mno-store, max-age=0\033[0m] will always redownload. (volflag=cachectl)")
     ap2.add_argument("--http-no-tcp", action="store_true", help="do not listen on TCP/IP for http/https; only listen on unix-domain-sockets")
     if ANYWIN:
         ap2.add_argument("--reuseaddr", action="store_true", help="set reuseaddr on listening sockets on windows; allows rapid restart of copyparty at the expense of being able to accidentally start multiple instances")
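
Since --cachectl is passed through verbatim as the header value, changing the site-wide policy is just a matter of starting the server with a different string. A minimal sketch, assuming copyparty is installed and on PATH; the port and volume are placeholders:

import subprocess

# serve the current directory read-only, letting browsers cache responses for 7 days
subprocess.run([
    "copyparty",
    "-p", "3923",                    # hypothetical port (copyparty's default)
    "-v", ".::r",                    # hypothetical volume: current dir, readable by everyone
    "--cachectl", "max-age=604869",  # example value taken from the --cachectl help text
])
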
@@ -1054,6 +1054,7 @@ class AuthSrv(object):
         self.is_lxc = args.c == ["/z/initcfg"]

         self._vf0b = {
+            "cachectl": self.args.cachectl,
             "tcolor": self.args.tcolor,
             "du_iwho": self.args.du_iwho,
             "shr_who": self.args.shr_who if self.args.shr else "no",
@@ -95,6 +95,7 @@ def vf_vmap() -> dict[str, str]:
     }
     for k in (
         "bup_ck",
+        "cachectl",
         "casechk",
         "chmod_d",
         "chmod_f",
@@ -419,6 +420,7 @@ flagcats = {
         "zipmaxt=no": "reply with 'no' if download-as-zip exceeds max",
         "zipmaxu": "zip-size-limit does not apply to authenticated users",
         "nopipe": "disable race-the-beam (download unfinished uploads)",
+        "cachectl=no-cache": "controls caching in webbrowsers",
         "mv_retry": "ms-windows: timeout for renaming busy files",
         "rm_retry": "ms-windows: timeout for deleting busy files",
         "davauth": "ask webdav clients to login for all folders",
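
Because cachectl is also a volflag, the caching policy can differ per volume, which is the point of this commit. A rough sketch using the same `:c,` syntax as the magic example in the README; the paths and the one-hour value are made up, and it assumes a volflag value may contain '=' as long as it has no comma:

import subprocess

subprocess.run([
    "copyparty",
    "-v", "/srv/priv:priv:r",                        # hypothetical volume; keeps the global default (no-cache)
    "-v", "/srv/pub:pub:r:c,cachectl=max-age=3600",  # hypothetical volume; browsers may cache for 1 hour
])
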
@@ -149,8 +149,6 @@ _ = (argparse, threading)

 USED4SEC = {"usedforsecurity": False} if sys.version_info > (3, 9) else {}

-NO_CACHE = {"Cache-Control": "no-cache"}
-
 ALL_COOKIES = "k304 no304 js idxh dots cppwd cppws".split()

 BADXFF = " due to dangerous misconfiguration (the http-header specified by --xff-hdr was received from an untrusted reverse-proxy)"
@@ -973,7 +971,7 @@ class HttpCli(object):
     def permit_caching(self) -> None:
         cache = self.uparam.get("cache")
         if cache is None:
-            self.out_headers.update(NO_CACHE)
+            self.out_headers["Cache-Control"] = self.vn.flags["cachectl"]
             return

         n = 69 if not cache else 604869 if cache == "i" else int(cache)
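
Read together with the n = 69 / 604869 / int(cache) line, the change means the no-?cache path now honors the per-volume cachectl value, while an explicit ?cache still wins. A standalone sketch of that decision; the max-age formatting in the override branch is an assumption based on the values above, not copied from the source:

from typing import Optional

def cache_header(cache_param: Optional[str], vol_cachectl: str) -> str:
    # no ?cache in the URL: fall back to the volume's cachectl volflag
    if cache_param is None:
        return vol_cachectl

    # bare ?cache -> 69s, ?cache=i -> 604869s (~7 days), ?cache=N -> N seconds
    n = 69 if not cache_param else 604869 if cache_param == "i" else int(cache_param)
    return "max-age=%d" % (n,)  # assumed header format for the override branch

print(cache_header(None, "no-cache"))   # no-cache
print(cache_header("321", "no-cache"))  # max-age=321
print(cache_header("i", "no-cache"))    # max-age=604869
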
@@ -5159,7 +5157,7 @@ class HttpCli(object):
         file_ts = int(max(ts_md, self.E.t0))
         file_lastmod, do_send, _ = self._chk_lastmod(file_ts)
         self.out_headers["Last-Modified"] = file_lastmod
-        self.out_headers.update(NO_CACHE)
+        self.out_headers["Cache-Control"] = "no-cache"
         status = 200 if do_send else 304

         arg_base = "?"
@@ -167,7 +167,7 @@ class Cfg(Namespace):
         ex = "ah_alg bname chdir chmod_f chpw_db doctitle df epilogues exit favico ipa html_head html_head_d html_head_s idp_login idp_logout lg_sba lg_sbf log_date log_fk md_sba md_sbf name og_desc og_site og_th og_title og_title_a og_title_v og_title_i opds_exts preadmes prologues readmes shr tcolor textfiles txt_eol ufavico ufavico_h unlist vname xff_src zipmaxt R RS SR"
         ka.update(**{k: "" for k in ex.split()})

-        ex = "ban_403 ban_404 ban_422 ban_pw ban_pwc ban_url dont_ban rss_fmt_d rss_fmt_t spinner"
+        ex = "ban_403 ban_404 ban_422 ban_pw ban_pwc ban_url dont_ban cachectl rss_fmt_d rss_fmt_t spinner"
         ka.update(**{k: "no" for k in ex.split()})

         ex = "ext_th grp idp_h_usr idp_hm_usr ipr on403 on404 qr_file xac xad xar xau xban xbc xbd xbr xbu xiu xm"