|           |                                                  |                           |
|-----------|--------------------------------------------------|---------------------------|
| author    | Gerald Carter <jerry@samba.org>                  | 2004-08-16 15:25:57 +0000 |
| committer | Gerald (Jerry) Carter <jerry@samba.org>          | 2007-10-10 10:52:21 -0500 |
| commit    | db1ff9b348a16d769f007362fa57ab89f56d101c (patch) |                           |
| tree      | 1f1e00ec354436999fcd55c2fa1489f745a346f7         |                           |
| parent    | 7fd7fbf472d5289e70dbb94dcec93b7c9f970d9a (diff)  |                           |
| download  | samba-db1ff9b348a16d769f007362fa57ab89f56d101c.tar.gz, samba-db1ff9b348a16d769f007362fa57ab89f56d101c.tar.xz, samba-db1ff9b348a16d769f007362fa57ab89f56d101c.zip | |
r1833: patch from James Peach to get swat to look for index.html by default when given a directory path with a trailing /
(This used to be commit 980740da784ce00ad1b388872297b82d4d368044)
| mode       | path              | changes |
|------------|-------------------|---------|
| -rw-r--r-- | source3/web/cgi.c | 28      |

1 file changed, 24 insertions(+), 4 deletions(-)
```diff
diff --git a/source3/web/cgi.c b/source3/web/cgi.c
index 07b9f52ff77..b1aa8ae754c 100644
--- a/source3/web/cgi.c
+++ b/source3/web/cgi.c
@@ -421,18 +421,38 @@ static void cgi_download(char *file)
 		}
 	}
 
-	if (!file_exist(file, &st)) {
+	if (sys_stat(file, &st) != 0)
+	{
 		cgi_setup_error("404 File Not Found","",
 				"The requested file was not found");
 	}
-	fd = web_open(file,O_RDONLY,0);
+
+	if (S_ISDIR(st.st_mode))
+	{
+		snprintf(buf, sizeof(buf), "%s/index.html", file);
+		if (!file_exist(buf, &st) || !S_ISREG(st.st_mode))
+		{
+			cgi_setup_error("404 File Not Found","",
+				"The requested file was not found");
+		}
+	}
+	else if (S_ISREG(st.st_mode))
+	{
+		snprintf(buf, sizeof(buf), "%s", file);
+	}
+	else
+	{
+		cgi_setup_error("404 File Not Found","",
+			"The requested file was not found");
+	}
+
+	fd = web_open(buf,O_RDONLY,0);
 	if (fd == -1) {
 		cgi_setup_error("404 File Not Found","",
 				"The requested file was not found");
 	}
 	printf("HTTP/1.0 200 OK\r\n");
-	if ((p=strrchr_m(file,'.'))) {
+	if ((p=strrchr_m(buf, '.'))) {
 		if (strcmp(p,".gif")==0) {
 			printf("Content-Type: image/gif\r\n");
 		} else if (strcmp(p,".jpg")==0) {
@@ -554,7 +574,7 @@ void cgi_setup(const char *rootdir, int auth_required)
 
 	string_sub(url, "/swat/", "", 0);
 
-	if (url[0] != '/' && strstr(url,"..")==0 && file_exist(url, NULL)) {
+	if (url[0] != '/' && strstr(url,"..")==0) {
 		cgi_download(url);
 	}
```
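For readers who want the fallback logic in isolation, here is a minimal sketch of the same directory-to-index.html resolution written against plain POSIX stat()/open(). Samba's sys_stat(), file_exist() and web_open() wrappers and the cgi_setup_error() 404 path are replaced with stand-ins (the function simply returns -1), so this is illustrative only, not the SWAT code itself.

```c
#include <stdio.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>

/* Resolve "file" to an open read-only fd, falling back to <dir>/index.html
 * when the path names a directory.  Returns the fd, or -1 where the patch
 * would report "404 File Not Found". */
static int open_for_download(const char *file)
{
	struct stat st;
	char buf[1024];

	if (stat(file, &st) != 0)
		return -1;                      /* path does not exist */

	if (S_ISDIR(st.st_mode)) {
		/* Directory requested: try its index.html instead. */
		snprintf(buf, sizeof(buf), "%s/index.html", file);
		if (stat(buf, &st) != 0 || !S_ISREG(st.st_mode))
			return -1;              /* no usable index.html */
	} else if (S_ISREG(st.st_mode)) {
		snprintf(buf, sizeof(buf), "%s", file);
	} else {
		return -1;                      /* neither file nor directory */
	}

	return open(buf, O_RDONLY);
}
```

Note that the index.html candidate is re-checked with S_ISREG(), mirroring the patch: a directory or special file named index.html still yields a 404 rather than being handed to open(). The second hunk drops the extra file_exist() check in cgi_setup(), leaving all existence handling to cgi_download(), which now has to distinguish files from directories anyway.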