/*
The MIT License (MIT)

Copyright (c) 2014-2017 DutchCoders [https://github.com/dutchcoders/]

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/

package server

import (
	// _ "transfer.sh/app/handlers"
	// _ "transfer.sh/app/utils"

	"archive/tar"
	"archive/zip"
	"bytes"
	"compress/gzip"
	"encoding/base64"
	"encoding/json"
	"errors"
	"fmt"
	"html"
	html_template "html/template"
	"io"
	"io/ioutil"
	"log"
	"math/rand"
	"mime"
	"net"
	"net/http"
	"net/url"
	"os"
	"path"
	"path/filepath"
	"strconv"
	"strings"
	"sync"
	text_template "text/template"
	"time"

	web "github.com/dutchcoders/transfer.sh-web"
	"github.com/gorilla/mux"
	"github.com/russross/blackfriday"
	qrcode "github.com/skip2/go-qrcode"
)

var (
	htmlTemplates = initHTMLTemplates()
	textTemplates = initTextTemplates()
)
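
// stripPrefix removes the web asset prefix from a request path.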
func stripPrefix(path string) string {
	return strings.Replace(path, web.Prefix+"/", "", -1)
}
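
// initTextTemplates returns an empty text template set with the format helper registered.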
func initTextTemplates() *text_template.Template {
	templateMap := text_template.FuncMap{"format": formatNumber}

	// Templates with functions available to them
	var templates = text_template.New("").Funcs(templateMap)
	return templates
}
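
// initHTMLTemplates returns an empty HTML template set with the format helper registered.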
func initHTMLTemplates() *html_template.Template {
	templateMap := html_template.FuncMap{"format": formatNumber}

	// Templates with functions available to them
	var templates = html_template.New("").Funcs(templateMap)

	return templates
}
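
// healthHandler reports that the server is up and serving requests.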
func healthHandler(w http.ResponseWriter, r *http.Request) {
	fmt.Fprintf(w, "Approaching Neutral Zone, all systems normal and functioning.")
}

// previewHandler shows a preview of the content for browsers (Accept: text/html) when the referer is not transfer.sh itself.
func (s *Server) previewHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	token := vars["token"]
	filename := vars["filename"]

	contentType, contentLength, err := s.storage.Head(token, filename)
	if err != nil {
		http.Error(w, http.StatusText(404), 404)
		return
	}

	var templatePath string
	var content html_template.HTML

	switch {
	case strings.HasPrefix(contentType, "image/"):
		templatePath = "download.image.html"
	case strings.HasPrefix(contentType, "video/"):
		templatePath = "download.video.html"
	case strings.HasPrefix(contentType, "audio/"):
		templatePath = "download.audio.html"
	case strings.HasPrefix(contentType, "text/"):
		templatePath = "download.markdown.html"

		var reader io.ReadCloser
		if reader, _, _, err = s.storage.Get(token, filename); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		var data []byte
		if data, err = ioutil.ReadAll(reader); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		if strings.HasPrefix(contentType, "text/x-markdown") || strings.HasPrefix(contentType, "text/markdown") {
			escapedData := html.EscapeString(string(data))
			output := blackfriday.MarkdownCommon([]byte(escapedData))
			content = html_template.HTML(output)
		} else if strings.HasPrefix(contentType, "text/plain") {
			content = html_template.HTML(fmt.Sprintf("<pre>%s</pre>", html.EscapeString(string(data))))
		} else {
			templatePath = "download.sandbox.html"
		}
	default:
		templatePath = "download.html"
	}

	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	resolvedUrl := resolveUrl(r, getURL(r).ResolveReference(r.URL), true)
	var png []byte
	png, err = qrcode.Encode(resolvedUrl, qrcode.High, 150)
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}

	qrCode := base64.StdEncoding.EncodeToString(png)

	data := struct {
		ContentType   string
		Content       html_template.HTML
		Filename      string
		Url           string
		ContentLength uint64
		GAKey         string
		UserVoiceKey  string
		QRCode        string
	}{
		contentType,
		content,
		filename,
		resolvedUrl,
		contentLength,
		s.gaKey,
		s.userVoiceKey,
		qrCode,
	}

	if err := htmlTemplates.ExecuteTemplate(w, templatePath, data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
}

// viewHandler serves the index page as HTML or plain text, depending on the
// client's Accept header.
func (s *Server) viewHandler(w http.ResponseWriter, r *http.Request) {
	// vars := mux.Vars(r)

	if acceptsHTML(r.Header) {
		if err := htmlTemplates.ExecuteTemplate(w, "index.html", nil); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	} else {
		if err := textTemplates.ExecuteTemplate(w, "index.txt", nil); err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}
	}
}
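
// notFoundHandler replies with a plain 404 for unknown routes.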
func (s *Server) notFoundHandler(w http.ResponseWriter, r *http.Request) {
	http.Error(w, http.StatusText(404), 404)
}
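
// sanitize strips any path components from a user-supplied file name.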
func sanitize(fileName string) string {
	return path.Clean(path.Base(fileName))
}
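
// postHandler accepts multipart form uploads, stores each file together with
// its metadata, and writes the resulting download URL per file.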
func (s *Server) postHandler(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseMultipartForm(_24K); nil != err {
		log.Printf("%s", err.Error())
		http.Error(w, "Error occurred parsing multipart form", 500)
		return
	}

	token := Encode(10000000 + int64(rand.Intn(1000000000)))

	w.Header().Set("Content-Type", "text/plain")

	for _, fheaders := range r.MultipartForm.File {
		for _, fheader := range fheaders {
			filename := sanitize(fheader.Filename)
			contentType := fheader.Header.Get("Content-Type")

			if contentType == "" {
				contentType = mime.TypeByExtension(filepath.Ext(fheader.Filename))
			}

			var f io.Reader
			var err error

			if f, err = fheader.Open(); err != nil {
				log.Printf("%s", err.Error())
				http.Error(w, err.Error(), 500)
				return
			}

			var b bytes.Buffer

			n, err := io.CopyN(&b, f, _24K+1)
			if err != nil && err != io.EOF {
				log.Printf("%s", err.Error())
				http.Error(w, err.Error(), 500)
				return
			}

			var reader io.Reader

			if n > _24K {
				file, err := ioutil.TempFile(s.tempPath, "transfer-")
				if err != nil {
					log.Fatal(err)
				}
				defer file.Close()

				n, err = io.Copy(file, io.MultiReader(&b, f))
				if err != nil {
					os.Remove(file.Name())

					log.Printf("%s", err.Error())
					http.Error(w, err.Error(), 500)
					return
				}

				reader, err = os.Open(file.Name())
			} else {
				reader = bytes.NewReader(b.Bytes())
			}

			contentLength := n

			metadata := MetadataForRequest(contentType, r)

			buffer := &bytes.Buffer{}
			if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
				log.Printf("%s", err.Error())
				http.Error(w, errors.New("Could not encode metadata").Error(), 500)
				return
			} else if err := s.storage.Put(token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
				log.Printf("%s", err.Error())
				http.Error(w, errors.New("Could not save metadata").Error(), 500)
				return
			}

			log.Printf("Uploading %s %s %d %s", token, filename, contentLength, contentType)

			if err = s.storage.Put(token, filename, reader, contentType, uint64(contentLength)); err != nil {
				log.Printf("Backend storage error: %s", err.Error())
				http.Error(w, err.Error(), 500)
				return
			}

			relativeURL, _ := url.Parse(path.Join(token, filename))
			fmt.Fprintln(w, getURL(r).ResolveReference(relativeURL).String())
		}
	}
}
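
// Metadata describes a stored upload and is persisted next to the file as <filename>.metadata.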
type Metadata struct {
	// ContentType is the original uploading content type
	ContentType string
	// Secret as knowledge to delete file
	// Secret string
	// Downloads is the actual number of downloads
	Downloads int
	// MaxDownloads contains the maximum numbers of downloads
	MaxDownloads int
	// MaxDate contains the max age of the file
	MaxDate time.Time
	// DeletionToken contains the token to match against for deletion
	DeletionToken string
}
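
// MetadataForRequest builds the initial metadata for an upload, honouring the
// Max-Downloads and Max-Days request headers when present.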
func MetadataForRequest(contentType string, r *http.Request) Metadata {
	metadata := Metadata{
		ContentType:   contentType,
		MaxDate:       time.Now().Add(time.Hour * 24 * 365 * 10),
		Downloads:     0,
		MaxDownloads:  99999999,
		DeletionToken: Encode(10000000+int64(rand.Intn(1000000000))) + Encode(10000000+int64(rand.Intn(1000000000))),
	}

	if v := r.Header.Get("Max-Downloads"); v == "" {
	} else if v, err := strconv.Atoi(v); err != nil {
	} else {
		metadata.MaxDownloads = v
	}

	if v := r.Header.Get("Max-Days"); v == "" {
	} else if v, err := strconv.Atoi(v); err != nil {
	} else {
		metadata.MaxDate = time.Now().Add(time.Hour * 24 * time.Duration(v))
	}

	return metadata
}
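
// putHandler stores a single file uploaded via PUT (e.g. curl --upload-file ./hello.txt https://transfer.sh/hello.txt),
// spooling bodies without a known length to disk first.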
func (s *Server) putHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	filename := sanitize(vars["filename"])

	contentLength := r.ContentLength

	var reader io.Reader

	reader = r.Body

	if contentLength == -1 {
		// queue file to disk, because s3 needs content length
		var err error
		var f io.Reader

		f = reader

		var b bytes.Buffer

		n, err := io.CopyN(&b, f, _24K+1)
		if err != nil && err != io.EOF {
			log.Printf("Error putting new file: %s", err.Error())
			http.Error(w, err.Error(), 500)
			return
		}

		if n > _24K {
			file, err := ioutil.TempFile(s.tempPath, "transfer-")
			if err != nil {
				log.Printf("%s", err.Error())
				http.Error(w, err.Error(), 500)
				return
			}

			defer file.Close()

			n, err = io.Copy(file, io.MultiReader(&b, f))
			if err != nil {
				os.Remove(file.Name())
				log.Printf("%s", err.Error())
				http.Error(w, err.Error(), 500)
				return
			}

			reader, err = os.Open(file.Name())
		} else {
			reader = bytes.NewReader(b.Bytes())
		}

		contentLength = n
	}

	if contentLength == 0 {
		log.Print("Empty content-length")
		http.Error(w, errors.New("Could not upload empty file").Error(), 400)
		return
	}

	contentType := r.Header.Get("Content-Type")

	if contentType == "" {
		contentType = mime.TypeByExtension(filepath.Ext(vars["filename"]))
	}

	token := Encode(10000000 + int64(rand.Intn(1000000000)))

	metadata := MetadataForRequest(contentType, r)

	buffer := &bytes.Buffer{}
	if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, errors.New("Could not encode metadata").Error(), 500)
		return
	} else if err := s.storage.Put(token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, errors.New("Could not save metadata").Error(), 500)
		return
	}

	log.Printf("Uploading %s %s %d %s", token, filename, contentLength, contentType)

	var err error

	if err = s.storage.Put(token, filename, reader, contentType, uint64(contentLength)); err != nil {
		log.Printf("Error putting new file: %s", err.Error())
		http.Error(w, errors.New("Could not save file").Error(), 500)
		return
	}

	// w.Statuscode = 200

	w.Header().Set("Content-Type", "text/plain")

	relativeURL, _ := url.Parse(path.Join(token, filename))
	deleteUrl, _ := url.Parse(path.Join(token, filename, metadata.DeletionToken))

	w.Header().Set("X-Url-Delete", resolveUrl(r, deleteUrl, true))

	fmt.Fprint(w, resolveUrl(r, relativeURL, false))
}
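
// resolveUrl resolves u against the request's base URL, folding any query or
// fragment into the path; with absolutePath the request path is ignored.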
func resolveUrl(r *http.Request, u *url.URL, absolutePath bool) string {
	if u.RawQuery != "" {
		u.Path = fmt.Sprintf("%s?%s", u.Path, url.QueryEscape(u.RawQuery))
		u.RawQuery = ""
	}

	if u.Fragment != "" {
		u.Path = fmt.Sprintf("%s#%s", u.Path, u.Fragment)
		u.Fragment = ""
	}

	if absolutePath {
		r.URL.Path = ""
	}

	return getURL(r).ResolveReference(u).String()
}
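
// getURL reconstructs the externally visible URL of the request, taking TLS,
// X-Forwarded-Proto and default ports into account.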
func getURL(r *http.Request) *url.URL {
	u := *r.URL

	if r.TLS != nil {
		u.Scheme = "https"
	} else if proto := r.Header.Get("X-Forwarded-Proto"); proto != "" {
		u.Scheme = proto
	} else {
		u.Scheme = "http"
	}

	if u.Host != "" {
	} else if host, port, err := net.SplitHostPort(r.Host); err != nil {
		u.Host = r.Host
	} else {
		if port == "80" && u.Scheme == "http" {
			u.Host = host
		} else if port == "443" && u.Scheme == "https" {
			u.Host = host
		} else {
			u.Host = net.JoinHostPort(host, port)
		}
	}

	return &u
}
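
// Lock acquires the per-file mutex for the given token and filename, creating it on first use.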
func (s *Server) Lock(token, filename string) error {
	key := path.Join(token, filename)

	if _, ok := s.locks[key]; !ok {
		s.locks[key] = &sync.Mutex{}
	}

	s.locks[key].Lock()

	return nil
}
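
// Unlock releases the per-file mutex for the given token and filename.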
func (s *Server) Unlock(token, filename string) error {
	key := path.Join(token, filename)
	s.locks[key].Unlock()

	return nil
}
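
// CheckMetadata verifies that the file has not expired by download count or
// date, and increments its download counter.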
func (s *Server) CheckMetadata(token, filename string) error {
	s.Lock(token, filename)
	defer s.Unlock(token, filename)

	var metadata Metadata

	r, _, _, err := s.storage.Get(token, fmt.Sprintf("%s.metadata", filename))
	if s.storage.IsNotExist(err) {
		return nil
	} else if err != nil {
		return err
	}

	defer r.Close()

	if err := json.NewDecoder(r).Decode(&metadata); err != nil {
		return err
	} else if metadata.Downloads >= metadata.MaxDownloads {
		return errors.New("MaxDownloads expired.")
	} else if time.Now().After(metadata.MaxDate) {
		return errors.New("MaxDate expired.")
	} else {
		// todo(nl5887): mutex?

		// update number of downloads
		metadata.Downloads++

		buffer := &bytes.Buffer{}
		if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
			return errors.New("Could not encode metadata")
		} else if err := s.storage.Put(token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
			return errors.New("Could not save metadata")
		}
	}

	return nil
}
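
// CheckDeletionToken verifies that the supplied deletion token matches the one
// stored in the file's metadata.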
func (s *Server) CheckDeletionToken(deletionToken, token, filename string) error {
	s.Lock(token, filename)
	defer s.Unlock(token, filename)

	var metadata Metadata

	r, _, _, err := s.storage.Get(token, fmt.Sprintf("%s.metadata", filename))
	if s.storage.IsNotExist(err) {
		return nil
	} else if err != nil {
		return err
	}

	defer r.Close()

	if err := json.NewDecoder(r).Decode(&metadata); err != nil {
		return err
	} else if metadata.DeletionToken != deletionToken {
		return errors.New("Deletion token doesn't match.")
	}

	return nil
}
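
// deleteHandler removes a stored file after validating its deletion token.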
func (s *Server) deleteHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	token := vars["token"]
	filename := vars["filename"]
	deletionToken := vars["deletionToken"]

	if err := s.CheckDeletionToken(deletionToken, token, filename); err != nil {
		log.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	err := s.storage.Delete(token, filename)
	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, "Could not delete file.", 500)
		return
	}
}
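
// zipHandler streams the requested files to the client as a zip archive.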
func (s *Server) zipHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	files := vars["files"]

	zipfilename := fmt.Sprintf("transfersh-%d.zip", uint16(time.Now().UnixNano()))

	w.Header().Set("Content-Type", "application/zip")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", zipfilename))
	w.Header().Set("Connection", "close")

	zw := zip.NewWriter(w)

	for _, key := range strings.Split(files, ",") {
		if strings.HasPrefix(key, "/") {
			key = key[1:]
		}

		key = strings.Replace(key, "\\", "/", -1)

		token := strings.Split(key, "/")[0]
		filename := sanitize(strings.Split(key, "/")[1])

		if err := s.CheckMetadata(token, filename); err != nil {
			log.Printf("Error metadata: %s", err.Error())
			continue
		}

		reader, _, _, err := s.storage.Get(token, filename)

		if err != nil {
			if s.storage.IsNotExist(err) {
				http.Error(w, "File not found", 404)
				return
			} else {
				log.Printf("%s", err.Error())
				http.Error(w, "Could not retrieve file.", 500)
				return
			}
		}

		defer reader.Close()

		header := &zip.FileHeader{
			Name:         strings.Split(key, "/")[1],
			Method:       zip.Store,
			ModifiedTime: uint16(time.Now().UnixNano()),
			ModifiedDate: uint16(time.Now().UnixNano()),
		}

		fw, err := zw.CreateHeader(header)

		if err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}

		if _, err = io.Copy(fw, reader); err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}
	}

	if err := zw.Close(); err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, "Internal server error.", 500)
		return
	}
}
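
// tarGzHandler streams the requested files to the client as a gzipped tar archive.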
func (s *Server) tarGzHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	files := vars["files"]

	tarfilename := fmt.Sprintf("transfersh-%d.tar.gz", uint16(time.Now().UnixNano()))

	w.Header().Set("Content-Type", "application/x-gzip")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", tarfilename))
	w.Header().Set("Connection", "close")

	os := gzip.NewWriter(w)
	defer os.Close()

	zw := tar.NewWriter(os)
	defer zw.Close()

	for _, key := range strings.Split(files, ",") {
		if strings.HasPrefix(key, "/") {
			key = key[1:]
		}

		key = strings.Replace(key, "\\", "/", -1)

		token := strings.Split(key, "/")[0]
		filename := sanitize(strings.Split(key, "/")[1])

		if err := s.CheckMetadata(token, filename); err != nil {
			log.Printf("Error metadata: %s", err.Error())
			continue
		}

		reader, _, contentLength, err := s.storage.Get(token, filename)
		if err != nil {
			if s.storage.IsNotExist(err) {
				http.Error(w, "File not found", 404)
				return
			} else {
				log.Printf("%s", err.Error())
				http.Error(w, "Could not retrieve file.", 500)
				return
			}
		}

		defer reader.Close()

		header := &tar.Header{
			Name: strings.Split(key, "/")[1],
			Size: int64(contentLength),
		}

		err = zw.WriteHeader(header)
		if err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}

		if _, err = io.Copy(zw, reader); err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}
	}
}
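
// tarHandler streams the requested files to the client as an uncompressed tar archive.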
func (s *Server) tarHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	files := vars["files"]

	tarfilename := fmt.Sprintf("transfersh-%d.tar", uint16(time.Now().UnixNano()))

	w.Header().Set("Content-Type", "application/x-tar")
	w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", tarfilename))
	w.Header().Set("Connection", "close")

	zw := tar.NewWriter(w)
	defer zw.Close()

	for _, key := range strings.Split(files, ",") {
		token := strings.Split(key, "/")[0]
		filename := strings.Split(key, "/")[1]

		if err := s.CheckMetadata(token, filename); err != nil {
			log.Printf("Error metadata: %s", err.Error())
			continue
		}

		reader, _, contentLength, err := s.storage.Get(token, filename)
		if err != nil {
			if s.storage.IsNotExist(err) {
				http.Error(w, "File not found", 404)
				return
			} else {
				log.Printf("%s", err.Error())
				http.Error(w, "Could not retrieve file.", 500)
				return
			}
		}

		defer reader.Close()

		header := &tar.Header{
			Name: strings.Split(key, "/")[1],
			Size: int64(contentLength),
		}

		err = zw.WriteHeader(header)
		if err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}

		if _, err = io.Copy(zw, reader); err != nil {
			log.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", 500)
			return
		}
	}
}
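
// headHandler reports the content type and length of a stored file without sending its body.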
func (s *Server) headHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	token := vars["token"]
	filename := vars["filename"]

	if err := s.CheckMetadata(token, filename); err != nil {
		log.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	contentType, contentLength, err := s.storage.Head(token, filename)
	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, "Could not retrieve file.", 500)
		return
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Content-Length", strconv.FormatUint(contentLength, 10))
	w.Header().Set("Connection", "close")
}
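
// getHandler serves a stored file, inline or as an attachment depending on the
// action route variable.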
func (s *Server) getHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	action := vars["action"]
	token := vars["token"]
	filename := vars["filename"]

	if err := s.CheckMetadata(token, filename); err != nil {
		log.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	reader, contentType, contentLength, err := s.storage.Get(token, filename)
	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, "Could not retrieve file.", 500)
		return
	}

	defer reader.Close()

	var disposition string

	if action == "inline" {
		disposition = "inline"
	} else {
		disposition = "attachment"
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Content-Length", strconv.FormatUint(contentLength, 10))
	w.Header().Set("Content-Disposition", fmt.Sprintf("%s; filename=\"%s\"", disposition, filename))
	w.Header().Set("Connection", "keep-alive")

	if _, err = io.Copy(w, reader); err != nil {
		log.Printf("%s", err.Error())
		http.Error(w, "Error occurred copying to output stream", 500)
		return
	}
}
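
// RedirectHandler redirects plain HTTP requests to HTTPS when forceHTTPs is
// enabled, skipping health checks and .onion hosts.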
func (s *Server) RedirectHandler(h http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if !s.forceHTTPs {
			// we don't want to enforce https
		} else if r.URL.Path == "/health.html" {
			// health check url won't redirect
		} else if strings.HasSuffix(ipAddrFromRemoteAddr(r.Host), ".onion") {
			// .onion addresses cannot get a valid certificate, so don't redirect
		} else if r.Header.Get("X-Forwarded-Proto") == "https" {
		} else if r.URL.Scheme == "https" {
		} else {
			u := getURL(r)
			u.Scheme = "https"

			http.Redirect(w, r, u.String(), http.StatusPermanentRedirect)
			return
		}

		h.ServeHTTP(w, r)
	}
}

// LoveHandler adds the custom DutchCoders headers to every response it serves.
func LoveHandler(h http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("x-made-with", "<3 by DutchCoders")
		w.Header().Set("x-served-by", "Proudly served by DutchCoders")
		w.Header().Set("Server", "Transfer.sh HTTP Server 1.0")
		h.ServeHTTP(w, r)
	}
}
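
// BasicAuthHandler protects the wrapped handler with HTTP basic auth when
// AuthUser and AuthPass are configured.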
func (s *Server) BasicAuthHandler(h http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if s.AuthUser == "" || s.AuthPass == "" {
			h.ServeHTTP(w, r)
			return
		}

		w.Header().Set("WWW-Authenticate", "Basic realm=\"Restricted\"")

		username, password, authOK := r.BasicAuth()
		if !authOK {
			http.Error(w, "Not authorized", 401)
			return
		}

		if username != s.AuthUser || password != s.AuthPass {
			http.Error(w, "Not authorized", 401)
			return
		}

		h.ServeHTTP(w, r)
	}
}