2014-10-16 20:01:43 +02:00
|
|
|
/*
|
|
|
|
The MIT License (MIT)
|
|
|
|
|
2017-03-23 00:02:36 +01:00
|
|
|
Copyright (c) 2014-2017 DutchCoders [https://github.com/dutchcoders/]
|
2021-07-10 20:20:19 +02:00
|
|
|
Copyright (c) 2018-2020 Andrea Spacca.
|
|
|
|
Copyright (c) 2020- Andrea Spacca and Stefan Benten.
|
2014-10-16 20:01:43 +02:00
|
|
|
|
|
|
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
of this software and associated documentation files (the "Software"), to deal
|
|
|
|
in the Software without restriction, including without limitation the rights
|
|
|
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
copies of the Software, and to permit persons to whom the Software is
|
|
|
|
furnished to do so, subject to the following conditions:
|
|
|
|
|
|
|
|
The above copyright notice and this permission notice shall be included in
|
|
|
|
all copies or substantial portions of the Software.
|
|
|
|
|
|
|
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
|
|
THE SOFTWARE.
|
|
|
|
*/
|
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
package server
|
2014-10-16 20:01:43 +02:00
|
|
|
|
|
|
|
import (
|
|
|
|
"archive/tar"
|
|
|
|
"archive/zip"
|
|
|
|
"bytes"
|
|
|
|
"compress/gzip"
|
2021-12-26 17:17:28 +01:00
|
|
|
"context"
|
|
|
|
"encoding/base64"
|
2017-03-28 16:12:31 +02:00
|
|
|
"encoding/json"
|
2014-10-16 20:01:43 +02:00
|
|
|
"errors"
|
|
|
|
"fmt"
|
2016-02-19 10:40:26 +01:00
|
|
|
"html"
|
2022-07-14 18:02:18 +02:00
|
|
|
htmlTemplate "html/template"
|
2014-10-16 20:01:43 +02:00
|
|
|
"io"
|
|
|
|
"mime"
|
2021-12-26 17:17:28 +01:00
|
|
|
"net"
|
2014-10-16 20:01:43 +02:00
|
|
|
"net/http"
|
2017-03-22 22:23:29 +01:00
|
|
|
"net/url"
|
2014-10-16 20:01:43 +02:00
|
|
|
"os"
|
2017-03-22 22:23:29 +01:00
|
|
|
"path"
|
2014-10-16 20:01:43 +02:00
|
|
|
"path/filepath"
|
2014-10-20 14:54:42 +02:00
|
|
|
"strconv"
|
2014-10-16 20:01:43 +02:00
|
|
|
"strings"
|
2017-03-28 16:12:31 +02:00
|
|
|
"sync"
|
2022-07-14 18:02:18 +02:00
|
|
|
textTemplate "text/template"
|
2014-10-20 14:54:42 +02:00
|
|
|
"time"
|
2016-01-14 09:33:02 +01:00
|
|
|
|
2023-03-11 02:08:55 +01:00
|
|
|
"github.com/ProtonMail/go-crypto/openpgp"
|
|
|
|
"github.com/ProtonMail/go-crypto/openpgp/armor"
|
|
|
|
"github.com/ProtonMail/go-crypto/openpgp/packet"
|
|
|
|
"github.com/ProtonMail/gopenpgp/v2/constants"
|
2023-03-10 17:41:43 +01:00
|
|
|
"github.com/dutchcoders/transfer.sh/server/storage"
|
2023-03-12 03:52:45 +01:00
|
|
|
"github.com/tg123/go-htpasswd"
|
2023-03-12 05:34:41 +01:00
|
|
|
"github.com/tomasen/realip"
|
2023-03-10 17:41:43 +01:00
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
web "github.com/dutchcoders/transfer.sh-web"
|
2016-01-14 09:33:02 +01:00
|
|
|
"github.com/gorilla/mux"
|
2019-05-11 14:42:59 +02:00
|
|
|
"github.com/microcosm-cc/bluemonday"
|
2023-03-10 17:41:43 +01:00
|
|
|
blackfriday "github.com/russross/blackfriday/v2"
|
|
|
|
qrcode "github.com/skip2/go-qrcode"
|
2022-05-15 16:48:13 +02:00
|
|
|
"golang.org/x/net/idna"
|
2014-10-16 20:01:43 +02:00
|
|
|
)
|
|
|
|
|
2019-05-18 14:13:23 +02:00
|
|
|
// getPathPart is the URL path segment inserted when building direct
// ("get") download links, e.g. /get/<token>/<filename>.
const getPathPart = "get"
|
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
var (
	// htmlTemplates renders responses for clients that accept HTML.
	htmlTemplates = initHTMLTemplates()
	// textTemplates renders plain-text responses for all other clients.
	textTemplates = initTextTemplates()
)
|
|
|
|
|
|
|
|
func stripPrefix(path string) string {
|
|
|
|
return strings.Replace(path, web.Prefix+"/", "", -1)
|
|
|
|
}
|
|
|
|
|
2022-07-14 18:02:18 +02:00
|
|
|
func initTextTemplates() *textTemplate.Template {
|
|
|
|
templateMap := textTemplate.FuncMap{"format": formatNumber}
|
2017-03-22 18:09:21 +01:00
|
|
|
|
|
|
|
// Templates with functions available to them
|
2022-07-14 18:02:18 +02:00
|
|
|
var templates = textTemplate.New("").Funcs(templateMap)
|
2017-03-22 18:09:21 +01:00
|
|
|
return templates
|
|
|
|
}
|
|
|
|
|
2022-07-14 18:02:18 +02:00
|
|
|
func initHTMLTemplates() *htmlTemplate.Template {
|
|
|
|
templateMap := htmlTemplate.FuncMap{"format": formatNumber}
|
2017-03-22 18:09:21 +01:00
|
|
|
|
|
|
|
// Templates with functions available to them
|
2022-07-14 18:02:18 +02:00
|
|
|
var templates = htmlTemplate.New("").Funcs(templateMap)
|
2017-03-22 18:09:21 +01:00
|
|
|
|
|
|
|
return templates
|
|
|
|
}
|
|
|
|
|
2023-03-11 02:08:55 +01:00
|
|
|
func attachEncryptionReader(reader io.ReadCloser, password string) (io.ReadCloser, error) {
|
|
|
|
if len(password) == 0 {
|
|
|
|
return reader, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return encrypt(reader, []byte(password))
|
|
|
|
}
|
|
|
|
|
|
|
|
func attachDecryptionReader(reader io.ReadCloser, password string) (io.ReadCloser, error) {
|
|
|
|
if len(password) == 0 {
|
|
|
|
return reader, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
return decrypt(reader, []byte(password))
|
|
|
|
}
|
|
|
|
|
|
|
|
// decrypt un-armors a symmetrically encrypted OpenPGP stream and returns a
// reader yielding the decrypted plaintext.
//
// The returned reader is a NopCloser over the message body: closing it does
// NOT close the underlying ciphertext reader — that remains the caller's
// responsibility.
func decrypt(ciphertext io.ReadCloser, password []byte) (plaintext io.ReadCloser, err error) {
	unarmored, err := armor.Decode(ciphertext)
	if err != nil {
		return
	}

	// Hand the password to openpgp exactly once; a second prompt invocation
	// means the first attempt failed, so abort instead of looping forever.
	firstTimeCalled := true
	var prompt = func(keys []openpgp.Key, symmetric bool) ([]byte, error) {
		if firstTimeCalled {
			firstTimeCalled = false
			return password, nil
		}
		// Re-prompt still occurs if SKESK parsing fails (i.e. when decrypted cipher algo is invalid).
		// For most (but not all) cases, inputting a wrong password is expected to trigger this error.
		return nil, errors.New("gopenpgp: wrong password in symmetric decryption")
	}

	config := &packet.Config{
		DefaultCipher: packet.CipherAES256,
	}

	// Symmetric-only decryption: no keyring entries are needed.
	var emptyKeyRing openpgp.EntityList
	md, err := openpgp.ReadMessage(unarmored.Body, emptyKeyRing, prompt, config)
	if err != nil {
		// Parsing errors when reading the message are most likely caused by incorrect password, but we cannot know for sure
		return
	}

	plaintext = io.NopCloser(md.UnverifiedBody)

	return
}
|
|
|
|
|
|
|
|
// encryptWrapperReader adapts the write-oriented OpenPGP encryption API into
// an io.ReadCloser: bytes read from plaintext are teed into the encrypt
// writer, whose armored output accumulates in buffer, from which Read serves
// the caller.
type encryptWrapperReader struct {
	// plaintext is the source, pre-wrapped in an io.TeeReader that copies
	// every byte read into encrypt.
	plaintext io.Reader
	// encrypt is the OpenPGP symmetric-encryption writer.
	encrypt io.WriteCloser
	// armored is the ASCII-armor writer that feeds buffer.
	armored io.WriteCloser
	// buffer holds armored ciphertext ready to be read by the caller.
	buffer io.ReadWriter
	// plaintextReadZero records that the source is exhausted and both
	// writers have been closed (their trailers flushed into buffer).
	plaintextReadZero bool
}
|
|
|
|
|
|
|
|
// Read advances the streaming encryption pipeline by one step: it pulls up
// to len(p) plaintext bytes (which the TeeReader copies into the encryptor),
// then serves whatever armored ciphertext has accumulated in the buffer.
// When the plaintext is exhausted, the encrypt and armor writers are closed
// exactly once so their trailers land in the buffer before the final reads.
func (e *encryptWrapperReader) Read(p []byte) (n int, err error) {
	// Scratch buffer for the plaintext; the teed copy is what matters,
	// the scratch contents are discarded.
	p2 := make([]byte, len(p))

	// NOTE(review): the read error is discarded; a failing source is only
	// observable as n == 0, which is treated as end-of-stream.
	n, _ = e.plaintext.Read(p2)
	if n == 0 {
		if !e.plaintextReadZero {
			// First zero-read: finalize the pipeline so the OpenPGP and
			// armor trailers are flushed into the buffer.
			err = e.encrypt.Close()
			if err != nil {
				return
			}

			err = e.armored.Close()
			if err != nil {
				return
			}

			e.plaintextReadZero = true
		}

		// Drain any remaining ciphertext; buffer reports io.EOF when empty.
		return e.buffer.Read(p)
	}

	// Source still producing: serve ciphertext generated so far.
	return e.buffer.Read(p)
}
|
|
|
|
|
|
|
|
// Close implements io.Closer as a no-op: the encrypt and armor writers are
// closed by Read once the plaintext is exhausted, and the lifetime of the
// underlying source reader stays with the caller.
func (e *encryptWrapperReader) Close() error {
	return nil
}
|
|
|
|
|
|
|
|
func NewEncryptWrapperReader(plaintext io.Reader, armored, encrypt io.WriteCloser, buffer io.ReadWriter) io.ReadCloser {
|
|
|
|
return &encryptWrapperReader{
|
|
|
|
plaintext: io.TeeReader(plaintext, encrypt),
|
|
|
|
encrypt: encrypt,
|
|
|
|
armored: armored,
|
|
|
|
buffer: buffer,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// encrypt builds a streaming, ASCII-armored, symmetric (password-based)
// OpenPGP encryption pipeline over plaintext and returns a reader producing
// the armored ciphertext. Encryption happens lazily as the returned reader
// is consumed; see encryptWrapperReader.Read.
func encrypt(plaintext io.ReadCloser, password []byte) (ciphertext io.ReadCloser, err error) {
	// All armored output accumulates here and is drained by the wrapper.
	bufferReadWriter := new(bytes.Buffer)
	armored, err := armor.Encode(bufferReadWriter, constants.PGPMessageHeader, nil)
	if err != nil {
		return
	}
	config := &packet.Config{
		DefaultCipher: packet.CipherAES256,
		Time:          time.Now,
	}

	// No filename hint is embedded; ModTime is truncated to whole seconds.
	hints := &openpgp.FileHints{
		IsBinary: true,
		FileName: "",
		ModTime:  time.Unix(time.Now().Unix(), 0),
	}

	encryptWriter, err := openpgp.SymmetricallyEncrypt(armored, password, hints, config)
	if err != nil {
		return
	}

	ciphertext = NewEncryptWrapperReader(plaintext, armored, encryptWriter, bufferReadWriter)

	return
}
|
|
|
|
|
2022-07-14 18:02:18 +02:00
|
|
|
func healthHandler(w http.ResponseWriter, _ *http.Request) {
|
2021-12-26 14:37:22 +01:00
|
|
|
_, _ = w.Write([]byte("Approaching Neutral Zone, all systems normal and functioning."))
|
2014-10-16 20:01:43 +02:00
|
|
|
}
|
|
|
|
|
2021-05-21 15:49:48 +02:00
|
|
|
// canContainsXSS reports whether the given content type is one a browser
// may interpret actively (HTML, XML, XSL, VTT, cache manifests, multipart
// replace streams) and therefore could carry cross-site scripting payloads.
func canContainsXSS(contentType string) bool {
	// Substring markers of actively-interpreted content types.
	markers := []string{
		"cache-manifest",
		"html",
		"rdf",
		"vtt",
		"xml",
		"xsl",
		"x-mixed-replace",
	}
	for _, marker := range markers {
		if strings.Contains(contentType, marker) {
			return true
		}
	}
	return false
}
|
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
/* The preview handler will show a preview of the content for browsers (accept type text/html), and referer is not transfer.sh */
|
2017-03-22 18:09:21 +01:00
|
|
|
func (s *Server) previewHandler(w http.ResponseWriter, r *http.Request) {
|
2023-03-16 02:25:46 +01:00
|
|
|
w.Header().Set("Vary", "Range, Referer, X-Decrypt-Password")
|
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
vars := mux.Vars(r)
|
|
|
|
|
|
|
|
token := vars["token"]
|
|
|
|
filename := vars["filename"]
|
|
|
|
|
2021-12-26 17:17:28 +01:00
|
|
|
metadata, err := s.checkMetadata(r.Context(), token, filename, false)
|
2019-07-06 20:13:20 +02:00
|
|
|
|
|
|
|
if err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("Error metadata: %s", err.Error())
|
2019-07-06 20:13:20 +02:00
|
|
|
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-03-15 19:35:43 +01:00
|
|
|
contentType := metadata.ContentType
|
2021-12-26 17:17:28 +01:00
|
|
|
contentLength, err := s.storage.Head(r.Context(), token, filename)
|
2014-11-13 21:41:43 +01:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, http.StatusText(404), 404)
|
|
|
|
return
|
|
|
|
}
|
2014-10-16 20:01:43 +02:00
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
var templatePath string
|
2022-07-14 18:02:18 +02:00
|
|
|
var content htmlTemplate.HTML
|
2014-10-16 20:01:43 +02:00
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
switch {
|
|
|
|
case strings.HasPrefix(contentType, "image/"):
|
|
|
|
templatePath = "download.image.html"
|
|
|
|
case strings.HasPrefix(contentType, "video/"):
|
|
|
|
templatePath = "download.video.html"
|
|
|
|
case strings.HasPrefix(contentType, "audio/"):
|
|
|
|
templatePath = "download.audio.html"
|
|
|
|
case strings.HasPrefix(contentType, "text/"):
|
|
|
|
templatePath = "download.markdown.html"
|
|
|
|
|
|
|
|
var reader io.ReadCloser
|
2023-03-10 17:41:43 +01:00
|
|
|
if reader, _, err = s.storage.Get(r.Context(), token, filename, nil); err != nil {
|
2014-11-13 21:41:43 +01:00
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
2014-10-16 20:01:43 +02:00
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
var data []byte
|
2019-04-28 21:36:45 +02:00
|
|
|
data = make([]byte, _5M)
|
|
|
|
if _, err = reader.Read(data); err != io.EOF && err != nil {
|
2014-11-13 21:41:43 +01:00
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
2014-10-16 20:01:43 +02:00
|
|
|
}
|
2014-11-13 21:41:43 +01:00
|
|
|
|
|
|
|
if strings.HasPrefix(contentType, "text/x-markdown") || strings.HasPrefix(contentType, "text/markdown") {
|
2019-05-11 14:47:58 +02:00
|
|
|
unsafe := blackfriday.Run(data)
|
2019-05-11 14:42:59 +02:00
|
|
|
output := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
|
2022-07-14 18:02:18 +02:00
|
|
|
content = htmlTemplate.HTML(output)
|
2014-11-13 21:41:43 +01:00
|
|
|
} else if strings.HasPrefix(contentType, "text/plain") {
|
2022-07-14 18:02:18 +02:00
|
|
|
content = htmlTemplate.HTML(fmt.Sprintf("<pre>%s</pre>", html.EscapeString(string(data))))
|
2014-11-13 21:41:43 +01:00
|
|
|
} else {
|
2016-01-14 09:33:02 +01:00
|
|
|
templatePath = "download.sandbox.html"
|
2014-11-13 21:41:43 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
templatePath = "download.html"
|
2014-10-16 20:01:43 +02:00
|
|
|
}
|
|
|
|
|
2019-05-18 14:13:23 +02:00
|
|
|
relativeURL, _ := url.Parse(path.Join(s.proxyPath, token, filename))
|
2020-05-03 11:19:32 +02:00
|
|
|
resolvedURL := resolveURL(r, relativeURL, s.proxyPort)
|
2019-05-18 14:13:23 +02:00
|
|
|
relativeURLGet, _ := url.Parse(path.Join(s.proxyPath, getPathPart, token, filename))
|
2020-05-03 11:19:32 +02:00
|
|
|
resolvedURLGet := resolveURL(r, relativeURLGet, s.proxyPort)
|
2018-07-01 13:51:13 +02:00
|
|
|
var png []byte
|
2019-05-18 14:13:23 +02:00
|
|
|
png, err = qrcode.Encode(resolvedURL, qrcode.High, 150)
|
2018-07-01 13:51:13 +02:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
qrCode := base64.StdEncoding.EncodeToString(png)
|
|
|
|
|
2020-05-03 11:19:32 +02:00
|
|
|
hostname := getURL(r, s.proxyPort).Host
|
|
|
|
webAddress := resolveWebAddress(r, s.proxyPath, s.proxyPort)
|
2018-12-15 21:16:30 +01:00
|
|
|
|
2014-11-13 21:41:43 +01:00
|
|
|
data := struct {
|
2021-05-09 09:21:54 +02:00
|
|
|
ContentType string
|
2022-07-14 18:02:18 +02:00
|
|
|
Content htmlTemplate.HTML
|
2021-05-09 09:21:54 +02:00
|
|
|
Filename string
|
2021-08-19 22:45:30 +02:00
|
|
|
URL string
|
|
|
|
URLGet string
|
|
|
|
URLRandomToken string
|
2021-05-09 09:21:54 +02:00
|
|
|
Hostname string
|
|
|
|
WebAddress string
|
|
|
|
ContentLength uint64
|
|
|
|
GAKey string
|
|
|
|
UserVoiceKey string
|
|
|
|
QRCode string
|
2014-11-13 21:41:43 +01:00
|
|
|
}{
|
|
|
|
contentType,
|
|
|
|
content,
|
|
|
|
filename,
|
2019-05-18 14:13:23 +02:00
|
|
|
resolvedURL,
|
|
|
|
resolvedURLGet,
|
2021-05-09 09:21:54 +02:00
|
|
|
token,
|
2018-12-15 21:16:30 +01:00
|
|
|
hostname,
|
|
|
|
webAddress,
|
2014-11-13 21:41:43 +01:00
|
|
|
contentLength,
|
2018-06-26 18:39:56 +02:00
|
|
|
s.gaKey,
|
|
|
|
s.userVoiceKey,
|
2018-07-01 13:51:13 +02:00
|
|
|
qrCode,
|
2014-11-13 21:41:43 +01:00
|
|
|
}
|
|
|
|
|
2017-03-22 22:25:42 +01:00
|
|
|
if err := htmlTemplates.ExecuteTemplate(w, templatePath, data); err != nil {
|
2014-11-13 21:41:43 +01:00
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
// this handler will output html or text, depending on the
|
|
|
|
// support of the client (Accept header).
|
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
func (s *Server) viewHandler(w http.ResponseWriter, r *http.Request) {
|
2014-11-13 21:41:43 +01:00
|
|
|
// vars := mux.Vars(r)
|
|
|
|
|
2020-05-03 11:19:32 +02:00
|
|
|
hostname := getURL(r, s.proxyPort).Host
|
|
|
|
webAddress := resolveWebAddress(r, s.proxyPath, s.proxyPort)
|
2018-12-15 21:16:30 +01:00
|
|
|
|
2021-07-23 11:20:49 +02:00
|
|
|
maxUploadSize := ""
|
|
|
|
if s.maxUploadSize > 0 {
|
|
|
|
maxUploadSize = formatSize(s.maxUploadSize)
|
|
|
|
}
|
|
|
|
|
2021-07-17 10:13:45 +02:00
|
|
|
purgeTime := ""
|
|
|
|
if s.purgeDays > 0 {
|
2023-05-19 12:01:54 +02:00
|
|
|
purgeTime = formatDurationDays(s.purgeDays)
|
2021-07-17 10:13:45 +02:00
|
|
|
}
|
|
|
|
|
2018-12-15 21:16:30 +01:00
|
|
|
data := struct {
|
2021-07-23 11:21:05 +02:00
|
|
|
Hostname string
|
|
|
|
WebAddress string
|
2021-12-15 21:06:54 +01:00
|
|
|
EmailContact string
|
2021-07-23 11:21:05 +02:00
|
|
|
GAKey string
|
|
|
|
UserVoiceKey string
|
|
|
|
PurgeTime string
|
2021-07-23 11:20:49 +02:00
|
|
|
MaxUploadSize string
|
2021-07-23 11:21:05 +02:00
|
|
|
SampleToken string
|
|
|
|
SampleToken2 string
|
2018-12-15 21:16:30 +01:00
|
|
|
}{
|
|
|
|
hostname,
|
|
|
|
webAddress,
|
2021-12-15 21:06:54 +01:00
|
|
|
s.emailContact,
|
2018-12-15 21:16:30 +01:00
|
|
|
s.gaKey,
|
2018-12-15 21:42:18 +01:00
|
|
|
s.userVoiceKey,
|
2021-07-17 10:13:45 +02:00
|
|
|
purgeTime,
|
2021-07-23 11:20:49 +02:00
|
|
|
maxUploadSize,
|
2021-08-19 22:45:30 +02:00
|
|
|
token(s.randomTokenLength),
|
|
|
|
token(s.randomTokenLength),
|
2018-12-15 21:16:30 +01:00
|
|
|
}
|
|
|
|
|
2023-03-16 02:25:46 +01:00
|
|
|
w.Header().Set("Vary", "Accept")
|
2017-03-22 21:09:40 +01:00
|
|
|
if acceptsHTML(r.Header) {
|
2018-12-15 21:16:30 +01:00
|
|
|
if err := htmlTemplates.ExecuteTemplate(w, "index.html", data); err != nil {
|
2014-10-16 20:01:43 +02:00
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
} else {
|
2018-12-15 21:16:30 +01:00
|
|
|
if err := textTemplates.ExecuteTemplate(w, "index.txt", data); err != nil {
|
2014-10-16 20:01:43 +02:00
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-07-14 18:02:18 +02:00
|
|
|
func (s *Server) notFoundHandler(w http.ResponseWriter, _ *http.Request) {
|
2014-10-20 14:54:42 +02:00
|
|
|
http.Error(w, http.StatusText(404), 404)
|
2014-10-16 20:01:43 +02:00
|
|
|
}
|
|
|
|
|
2017-03-23 11:46:59 +01:00
|
|
|
// sanitize reduces a user-supplied file name to its final path element so
// uploads cannot traverse directories.
func sanitize(fileName string) string {
	base := path.Base(fileName)
	return base
}
|
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
// postHandler accepts multipart/form-data uploads (one or more files),
// stores each file plus a JSON metadata sidecar under a single freshly
// generated token, and responds with one download URL per line. A delete
// URL per file is exposed via repeated X-Url-Delete headers.
func (s *Server) postHandler(w http.ResponseWriter, r *http.Request) {
	// _24K is the in-memory threshold before multipart parts spill to disk.
	if err := r.ParseMultipartForm(_24K); nil != err {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Error occurred copying to output stream", http.StatusInternalServerError)
		return
	}

	// One token is shared by every file in this request.
	token := token(s.randomTokenLength)

	w.Header().Set("Content-Type", "text/plain")

	responseBody := ""

	for _, fHeaders := range r.MultipartForm.File {
		for _, fHeader := range fHeaders {
			filename := sanitize(fHeader.Filename)
			contentType := mime.TypeByExtension(filepath.Ext(fHeader.Filename))

			var f io.Reader
			var err error

			if f, err = fHeader.Open(); err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}

			// Spool to a temp file first: the storage backend needs a known
			// content length, and the clamav prescan scans an on-disk file.
			file, err := os.CreateTemp(s.tempPath, "transfer-")
			// NOTE(review): defer inside a loop — temp files are only
			// reclaimed when the whole handler returns, so a request with
			// many files keeps all of them open until the end.
			defer s.cleanTmpFile(file)

			if err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}

			n, err := io.Copy(file, f)
			if err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return
			}

			// The spooled byte count is the authoritative content length.
			contentLength := n

			// Rewind so the upload to storage starts at the beginning.
			_, err = file.Seek(0, io.SeekStart)
			if err != nil {
				s.logger.Printf("%s", err.Error())
				return
			}

			if s.maxUploadSize > 0 && contentLength > s.maxUploadSize {
				s.logger.Print("Entity too large")
				http.Error(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge)
				return
			}

			if s.performClamavPrescan {
				status, err := s.performScan(file.Name())
				if err != nil {
					s.logger.Printf("%s", err.Error())
					http.Error(w, "Could not perform prescan", http.StatusInternalServerError)
					return
				}

				if status != clamavScanStatusOK {
					s.logger.Printf("prescan positive: %s", status)
					http.Error(w, "Clamav prescan found a virus", http.StatusPreconditionFailed)
					return
				}
			}

			metadata := metadataForRequest(contentType, contentLength, s.randomTokenLength, r)

			// Persist the "<filename>.metadata" sidecar before the payload.
			buffer := &bytes.Buffer{}
			if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, "Could not encode metadata", http.StatusInternalServerError)

				return
			} else if err := s.storage.Put(r.Context(), token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, "Could not save metadata", http.StatusInternalServerError)

				return
			}

			s.logger.Printf("Uploading %s %s %d %s", token, filename, contentLength, contentType)

			// Optionally wrap the temp file in a streaming OpenPGP encryptor
			// when the client supplied X-Encrypt-Password.
			reader, err := attachEncryptionReader(file, r.Header.Get("X-Encrypt-Password"))
			if err != nil {
				http.Error(w, "Could not crypt file", http.StatusInternalServerError)
				return
			}

			// NOTE(review): contentLength is the plaintext size; with
			// encryption enabled the armored stream length differs —
			// presumably the backend tolerates the mismatch, confirm
			// against the storage.Put implementations.
			if err = s.storage.Put(r.Context(), token, filename, reader, contentType, uint64(contentLength)); err != nil {
				s.logger.Printf("Backend storage error: %s", err.Error())
				http.Error(w, err.Error(), http.StatusInternalServerError)
				return

			}

			filename = url.PathEscape(filename)
			relativeURL, _ := url.Parse(path.Join(s.proxyPath, token, filename))
			deleteURL, _ := url.Parse(path.Join(s.proxyPath, token, filename, metadata.DeletionToken))

			// Add (not Set): one delete-URL header per uploaded file.
			w.Header().Add("X-Url-Delete", resolveURL(r, deleteURL, s.proxyPort))

			responseBody += fmt.Sprintln(getURL(r, s.proxyPort).ResolveReference(relativeURL).String())
		}
	}
	_, err := w.Write([]byte(responseBody))
	if err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}
|
|
|
|
|
2021-07-07 21:03:54 +02:00
|
|
|
func (s *Server) cleanTmpFile(f *os.File) {
|
2019-01-16 19:58:11 +01:00
|
|
|
if f != nil {
|
2019-04-28 21:36:45 +02:00
|
|
|
err := f.Close()
|
|
|
|
if err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("Error closing tmpfile: %s (%s)", err, f.Name())
|
2019-04-28 21:36:45 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
err = os.Remove(f.Name())
|
|
|
|
if err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("Error removing tmpfile: %s (%s)", err, f.Name())
|
2019-04-28 21:36:45 +02:00
|
|
|
}
|
2019-01-16 19:58:11 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-19 22:45:30 +02:00
|
|
|
// metadata is the JSON sidecar record stored next to each uploaded file
// (as "<filename>.metadata") describing its type, limits and deletion token.
type metadata struct {
	// ContentType is the original uploading content type
	ContentType string
	// ContentLength is the original uploading content length
	ContentLength int64
	// Downloads is the actual number of downloads
	Downloads int
	// MaxDownloads contains the maximum numbers of downloads
	MaxDownloads int
	// MaxDate contains the max age of the file
	MaxDate time.Time
	// DeletionToken contains the token to match against for deletion
	DeletionToken string
	// Encrypted contains if the file was encrypted
	Encrypted bool
	// DecryptedContentType is the content type of the original plaintext,
	// recorded only when Encrypted is true (the stored stream is served as
	// text/plain armored data).
	DecryptedContentType string
}
|
|
|
|
|
2023-03-11 02:08:55 +01:00
|
|
|
func metadataForRequest(contentType string, contentLength int64, randomTokenLength int, r *http.Request) metadata {
|
2021-08-19 22:45:30 +02:00
|
|
|
metadata := metadata{
|
2021-03-02 16:48:04 +01:00
|
|
|
ContentType: strings.ToLower(contentType),
|
2023-03-11 02:08:55 +01:00
|
|
|
ContentLength: contentLength,
|
2019-07-06 20:33:35 +02:00
|
|
|
MaxDate: time.Time{},
|
2018-06-24 06:46:57 +02:00
|
|
|
Downloads: 0,
|
2019-07-06 20:33:35 +02:00
|
|
|
MaxDownloads: -1,
|
2021-08-19 22:45:30 +02:00
|
|
|
DeletionToken: token(randomTokenLength) + token(randomTokenLength),
|
2017-03-28 16:12:31 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if v := r.Header.Get("Max-Downloads"); v == "" {
|
|
|
|
} else if v, err := strconv.Atoi(v); err != nil {
|
|
|
|
} else {
|
|
|
|
metadata.MaxDownloads = v
|
|
|
|
}
|
|
|
|
|
|
|
|
if v := r.Header.Get("Max-Days"); v == "" {
|
|
|
|
} else if v, err := strconv.Atoi(v); err != nil {
|
|
|
|
} else {
|
|
|
|
metadata.MaxDate = time.Now().Add(time.Hour * 24 * time.Duration(v))
|
|
|
|
}
|
|
|
|
|
2023-03-11 02:08:55 +01:00
|
|
|
if password := r.Header.Get("X-Encrypt-Password"); password != "" {
|
|
|
|
metadata.Encrypted = true
|
|
|
|
metadata.ContentType = "text/plain; charset=utf-8"
|
|
|
|
metadata.DecryptedContentType = contentType
|
|
|
|
} else {
|
|
|
|
metadata.Encrypted = false
|
|
|
|
}
|
|
|
|
|
2017-03-28 16:12:31 +02:00
|
|
|
return metadata
|
|
|
|
}
|
|
|
|
|
2017-03-22 18:09:21 +01:00
|
|
|
// putHandler accepts a raw PUT upload of a single file, stores the payload
// and its JSON metadata sidecar under a fresh token, and responds with the
// download URL (plus an X-Url-Delete header).
func (s *Server) putHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	filename := sanitize(vars["filename"])

	contentLength := r.ContentLength

	defer storage.CloseCheck(r.Body)

	reader := r.Body

	// Spool to disk when the length is unknown/absent or a clamav prescan
	// is required; otherwise stream the body straight through.
	if contentLength < 1 || s.performClamavPrescan {
		file, err := os.CreateTemp(s.tempPath, "transfer-")
		defer s.cleanTmpFile(file)
		if err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		// queue file to disk, because s3 needs content length
		// and clamav prescan scans a file
		n, err := io.Copy(file, r.Body)
		if err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, err.Error(), http.StatusInternalServerError)

			return
		}

		// Rewind so the subsequent storage upload reads from the start.
		_, err = file.Seek(0, io.SeekStart)
		if err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Cannot reset cache file", http.StatusInternalServerError)

			return
		}

		// The spooled byte count replaces the (possibly missing) header value.
		contentLength = n

		if s.performClamavPrescan {
			status, err := s.performScan(file.Name())
			if err != nil {
				s.logger.Printf("%s", err.Error())
				http.Error(w, "Could not perform prescan", http.StatusInternalServerError)
				return
			}

			if status != clamavScanStatusOK {
				s.logger.Printf("prescan positive: %s", status)
				http.Error(w, "Clamav prescan found a virus", http.StatusPreconditionFailed)
				return
			}
		}

		reader = file
	}

	if s.maxUploadSize > 0 && contentLength > s.maxUploadSize {
		s.logger.Print("Entity too large")
		http.Error(w, http.StatusText(http.StatusRequestEntityTooLarge), http.StatusRequestEntityTooLarge)
		return
	}

	if contentLength == 0 {
		s.logger.Print("Empty content-length")
		http.Error(w, "Could not upload empty file", http.StatusBadRequest)
		return
	}

	contentType := mime.TypeByExtension(filepath.Ext(vars["filename"]))

	token := token(s.randomTokenLength)

	metadata := metadataForRequest(contentType, contentLength, s.randomTokenLength, r)

	// Persist the "<filename>.metadata" sidecar before the payload; also
	// reject MaxDate values that overflowed time.Time (Max-Days too large).
	buffer := &bytes.Buffer{}
	if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Could not encode metadata", http.StatusInternalServerError)
		return
	} else if !metadata.MaxDate.IsZero() && time.Now().After(metadata.MaxDate) {
		s.logger.Print("Invalid MaxDate")
		http.Error(w, "Invalid MaxDate, make sure Max-Days is smaller than 290 years", http.StatusBadRequest)
		return
	} else if err := s.storage.Put(r.Context(), token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Could not save metadata", http.StatusInternalServerError)
		return
	}

	s.logger.Printf("Uploading %s %s %d %s", token, filename, contentLength, contentType)

	// Optionally wrap the source in a streaming OpenPGP encryptor when the
	// client supplied X-Encrypt-Password.
	reader, err := attachEncryptionReader(reader, r.Header.Get("X-Encrypt-Password"))
	if err != nil {
		http.Error(w, "Could not crypt file", http.StatusInternalServerError)
		return
	}

	// NOTE(review): contentLength is the plaintext size; with encryption
	// enabled the armored stream length differs — presumably the backend
	// tolerates the mismatch, confirm against the storage.Put implementations.
	if err = s.storage.Put(r.Context(), token, filename, reader, contentType, uint64(contentLength)); err != nil {
		s.logger.Printf("Error putting new file: %s", err.Error())
		http.Error(w, "Could not save file", http.StatusInternalServerError)
		return
	}

	// w.Statuscode = 200

	w.Header().Set("Content-Type", "text/plain")

	filename = url.PathEscape(filename)
	relativeURL, _ := url.Parse(path.Join(s.proxyPath, token, filename))
	deleteURL, _ := url.Parse(path.Join(s.proxyPath, token, filename, metadata.DeletionToken))

	w.Header().Set("X-Url-Delete", resolveURL(r, deleteURL, s.proxyPort))

	_, _ = w.Write([]byte(resolveURL(r, relativeURL, s.proxyPort)))
}
|
|
|
|
|
// resolveURL resolves u against the absolute external URL of the incoming
// request (scheme/host reconstructed by getURL, honoring proxyPort) and
// returns the result as a string.
func resolveURL(r *http.Request, u *url.URL, proxyPort string) string {
	// Clear the request path in place so the reference resolves against the
	// host root rather than the request's own path. NOTE(review): this
	// mutates r.URL for the remainder of the request — callers appear to
	// rely on it only after this point; confirm before reordering.
	r.URL.Path = ""

	return getURL(r, proxyPort).ResolveReference(u).String()
}
// resolveKey normalizes a client-supplied download key (a "token/filename"
// pair, possibly carrying a leading slash and/or the configured proxy-path
// prefix) into the canonical "token/filename" form used by storage lookups.
func resolveKey(key, proxyPath string) string {
	// Drop a single leading slash, then the proxy-path prefix if present.
	key = strings.TrimPrefix(key, "/")

	key = strings.TrimPrefix(key, proxyPath)

	// Normalize Windows-style separators. ReplaceAll is the idiomatic
	// replacement for Replace(..., -1).
	key = strings.ReplaceAll(key, "\\", "/")

	return key
}
2020-05-03 11:19:32 +02:00
|
|
|
func resolveWebAddress(r *http.Request, proxyPath string, proxyPort string) string {
|
2021-12-26 14:37:22 +01:00
|
|
|
rUrl := getURL(r, proxyPort)
|
2018-12-15 21:16:30 +01:00
|
|
|
|
2019-03-30 12:35:57 +01:00
|
|
|
var webAddress string
|
|
|
|
|
|
|
|
if len(proxyPath) == 0 {
|
2019-05-18 14:13:23 +02:00
|
|
|
webAddress = fmt.Sprintf("%s://%s/",
|
2021-12-26 14:37:22 +01:00
|
|
|
rUrl.ResolveReference(rUrl).Scheme,
|
|
|
|
rUrl.ResolveReference(rUrl).Host)
|
2019-03-30 12:35:57 +01:00
|
|
|
} else {
|
|
|
|
webAddress = fmt.Sprintf("%s://%s/%s",
|
2021-12-26 14:37:22 +01:00
|
|
|
rUrl.ResolveReference(rUrl).Scheme,
|
|
|
|
rUrl.ResolveReference(rUrl).Host,
|
2019-03-30 12:35:57 +01:00
|
|
|
proxyPath)
|
|
|
|
}
|
|
|
|
|
|
|
|
return webAddress
|
2018-12-15 21:16:30 +01:00
|
|
|
}
|
|
|
|
|
2021-01-06 13:33:46 +01:00
|
|
|
// Similar to the logic found here:
|
|
|
|
// https://github.com/golang/go/blob/release-branch.go1.14/src/net/http/clone.go#L22-L33
|
|
|
|
func cloneURL(u *url.URL) *url.URL {
|
|
|
|
c := &url.URL{}
|
|
|
|
*c = *u
|
|
|
|
|
|
|
|
if u.User != nil {
|
|
|
|
c.User = &url.Userinfo{}
|
|
|
|
*c.User = *u.User
|
|
|
|
}
|
|
|
|
|
|
|
|
return c
|
|
|
|
}
|
|
|
|
|
// getURL reconstructs the absolute external URL of the incoming request:
// scheme from the TLS state or the X-Forwarded-Proto header, host from
// r.Host (converted from punycode to unicode for display), with default
// ports (80/http, 443/https) elided and proxyPort, if set, overriding the
// port from the request.
func getURL(r *http.Request, proxyPort string) *url.URL {
	// Work on a copy; r.URL must not be mutated here.
	u := cloneURL(r.URL)

	// Scheme precedence: real TLS > reverse-proxy header > plain http.
	if r.TLS != nil {
		u.Scheme = "https"
	} else if proto := r.Header.Get("X-Forwarded-Proto"); proto != "" {
		u.Scheme = proto
	} else {
		u.Scheme = "http"
	}

	// r.Host may or may not carry a port; on parse failure treat the whole
	// value as the host.
	host, port, err := net.SplitHostPort(r.Host)
	if err != nil {
		host = r.Host
		port = ""
	}

	// Best effort: render an IDN host in unicode rather than xn-- punycode.
	// On conversion failure the raw host is kept.
	p := idna.New(idna.ValidateForRegistration())
	var hostFromPunycode string
	hostFromPunycode, err = p.ToUnicode(host)
	if err == nil {
		host = hostFromPunycode
	}

	// An explicitly configured proxy port wins over the request's port.
	if len(proxyPort) != 0 {
		port = proxyPort
	}

	// Omit the port when absent or when it is the scheme's default.
	if len(port) == 0 {
		u.Host = host
	} else {
		if port == "80" && u.Scheme == "http" {
			u.Host = host
		} else if port == "443" && u.Scheme == "https" {
			u.Host = host
		} else {
			u.Host = net.JoinHostPort(host, port)
		}
	}

	return u
}
2021-08-19 22:45:30 +02:00
|
|
|
func (metadata metadata) remainingLimitHeaderValues() (remainingDownloads, remainingDays string) {
|
2019-07-06 20:33:35 +02:00
|
|
|
if metadata.MaxDate.IsZero() {
|
|
|
|
remainingDays = "n/a"
|
|
|
|
} else {
|
2021-12-26 14:03:27 +01:00
|
|
|
timeDifference := time.Until(metadata.MaxDate)
|
2019-07-06 20:33:35 +02:00
|
|
|
remainingDays = strconv.Itoa(int(timeDifference.Hours()/24) + 1)
|
|
|
|
}
|
|
|
|
|
|
|
|
if metadata.MaxDownloads == -1 {
|
|
|
|
remainingDownloads = "n/a"
|
|
|
|
} else {
|
|
|
|
remainingDownloads = strconv.Itoa(metadata.MaxDownloads - metadata.Downloads)
|
|
|
|
}
|
2019-06-17 02:43:22 +02:00
|
|
|
|
|
|
|
return remainingDownloads, remainingDays
|
|
|
|
}
|
|
|
|
|
// lock serializes access to the upload identified by token/filename.
// A per-key mutex is created on first use via LoadOrStore and kept in
// s.locks for the lifetime of the server.
func (s *Server) lock(token, filename string) {
	key := path.Join(token, filename)

	lock, _ := s.locks.LoadOrStore(key, &sync.Mutex{})

	lock.(*sync.Mutex).Lock()
}
// unlock releases the per-key mutex taken by lock for token/filename.
// NOTE(review): if called for a key that was never locked, LoadOrStore
// creates a fresh mutex and Unlock panics ("unlock of unlocked mutex") —
// callers must pair this strictly with lock.
func (s *Server) unlock(token, filename string) {
	key := path.Join(token, filename)

	lock, _ := s.locks.LoadOrStore(key, &sync.Mutex{})

	lock.(*sync.Mutex).Unlock()
}
// checkMetadata loads and validates the stored metadata for token/filename.
// It returns an error when the metadata cannot be read, the download budget
// is exhausted, or the expiry date has passed. When increaseDownload is true
// and a download limit is set, the download counter is incremented and the
// metadata is persisted back. The whole read-modify-write is serialized per
// token/filename via s.lock.
func (s *Server) checkMetadata(ctx context.Context, token, filename string, increaseDownload bool) (metadata, error) {
	s.lock(token, filename)
	defer s.unlock(token, filename)

	var metadata metadata

	// Metadata lives next to the payload under "<filename>.metadata".
	r, _, err := s.storage.Get(ctx, token, fmt.Sprintf("%s.metadata", filename), nil)
	// NOTE(review): deferred before the error check, so r may be nil here —
	// presumably CloseCheck tolerates nil; confirm in the storage package.
	defer storage.CloseCheck(r)

	if err != nil {
		return metadata, err
	}

	if err := json.NewDecoder(r).Decode(&metadata); err != nil {
		return metadata, err
	} else if metadata.MaxDownloads != -1 && metadata.Downloads >= metadata.MaxDownloads {
		// MaxDownloads == -1 means unlimited.
		return metadata, errors.New("maxDownloads expired")
	} else if !metadata.MaxDate.IsZero() && time.Now().After(metadata.MaxDate) {
		// Zero MaxDate means no expiry.
		return metadata, errors.New("maxDate expired")
	} else if metadata.MaxDownloads != -1 && increaseDownload {
		// todo(nl5887): mutex?

		// update number of downloads
		metadata.Downloads++

		// Re-encode and write back so the counter survives restarts.
		buffer := &bytes.Buffer{}
		if err := json.NewEncoder(buffer).Encode(metadata); err != nil {
			return metadata, errors.New("could not encode metadata")
		} else if err := s.storage.Put(ctx, token, fmt.Sprintf("%s.metadata", filename), buffer, "text/json", uint64(buffer.Len())); err != nil {
			return metadata, errors.New("could not save metadata")
		}
	}

	return metadata, nil
}
// checkDeletionToken verifies that deletionToken matches the DeletionToken
// recorded in the stored metadata for token/filename. It returns nil only
// when the metadata exists, decodes, and the tokens match. Access is
// serialized per token/filename via s.lock.
func (s *Server) checkDeletionToken(ctx context.Context, deletionToken, token, filename string) error {
	s.lock(token, filename)
	defer s.unlock(token, filename)

	var metadata metadata

	r, _, err := s.storage.Get(ctx, token, fmt.Sprintf("%s.metadata", filename), nil)
	// NOTE(review): deferred before the error check — r may be nil;
	// presumably CloseCheck tolerates that.
	defer storage.CloseCheck(r)

	if s.storage.IsNotExist(err) {
		return errors.New("metadata doesn't exist")
	} else if err != nil {
		return err
	}

	if err := json.NewDecoder(r).Decode(&metadata); err != nil {
		return err
	} else if metadata.DeletionToken != deletionToken {
		return errors.New("deletion token doesn't match")
	}

	return nil
}
2021-01-05 17:23:47 +01:00
|
|
|
func (s *Server) purgeHandler() {
|
|
|
|
ticker := time.NewTicker(s.purgeInterval)
|
|
|
|
go func() {
|
|
|
|
for {
|
2021-12-26 14:03:27 +01:00
|
|
|
<-ticker.C
|
2021-12-26 17:17:28 +01:00
|
|
|
err := s.storage.Purge(context.TODO(), s.purgeDays)
|
2021-12-26 14:03:27 +01:00
|
|
|
if err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("error cleaning up expired files: %v", err)
|
2021-01-05 17:23:47 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}()
|
|
|
|
}
|
|
|
|
|
// deleteHandler removes an uploaded file, authorized by the deletion token
// that was issued at upload time (X-Url-Delete). A wrong token and a missing
// file both yield 404.
func (s *Server) deleteHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	token := vars["token"]
	filename := vars["filename"]
	deletionToken := vars["deletionToken"]

	// Validate the deletion token against the stored metadata.
	if err := s.checkDeletionToken(r.Context(), deletionToken, token, filename); err != nil {
		s.logger.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	err := s.storage.Delete(r.Context(), token, filename)
	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Could not delete file.", http.StatusInternalServerError)
		return
	}
}
// zipHandler streams the requested files as a zip archive. "files" is a
// comma-separated list of token/filename keys; entries whose metadata is
// missing or expired are skipped rather than failing the whole archive.
func (s *Server) zipHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	files := vars["files"]

	// NOTE(review): uint16 truncation keeps the name short but limits
	// uniqueness to 65536 values — cosmetic only, the name is just the
	// suggested download filename.
	zipfilename := fmt.Sprintf("transfersh-%d.zip", uint16(time.Now().UnixNano()))

	w.Header().Set("Content-Type", "application/zip")
	commonHeader(w, zipfilename)

	zw := zip.NewWriter(w)

	for _, key := range strings.Split(files, ",") {
		key = resolveKey(key, s.proxyPath)

		// NOTE(review): assumes key contains a "/"; an entry without one
		// would panic on the [1] index — presumably prevented upstream.
		token := strings.Split(key, "/")[0]
		filename := sanitize(strings.Split(key, "/")[1])

		if _, err := s.checkMetadata(r.Context(), token, filename, true); err != nil {
			s.logger.Printf("Error metadata: %s", err.Error())
			continue
		}

		reader, _, err := s.storage.Get(r.Context(), token, filename, nil)
		// NOTE(review): deferred inside the loop — readers stay open until
		// the handler returns.
		defer storage.CloseCheck(reader)

		if err != nil {
			if s.storage.IsNotExist(err) {
				http.Error(w, "File not found", 404)
				return
			}

			// NOTE(review): once archive bytes have been written, http.Error
			// can no longer change the status code; the text is appended to
			// the (now corrupt) stream.
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Could not retrieve file.", http.StatusInternalServerError)
			return
		}

		// Store (no compression) — payloads are streamed as-is.
		header := &zip.FileHeader{
			Name:     strings.Split(key, "/")[1],
			Method:   zip.Store,
			Modified: time.Now().UTC(),
		}

		fw, err := zw.CreateHeader(header)

		if err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", http.StatusInternalServerError)
			return
		}

		if _, err = io.Copy(fw, reader); err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", http.StatusInternalServerError)
			return
		}
	}

	// Close flushes the central directory; without it the zip is invalid.
	if err := zw.Close(); err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Internal server error.", http.StatusInternalServerError)
		return
	}
}
// tarGzHandler streams the requested files as a gzip-compressed tar archive.
// "files" is a comma-separated list of token/filename keys; entries whose
// metadata is missing or expired are skipped.
func (s *Server) tarGzHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	files := vars["files"]

	tarfilename := fmt.Sprintf("transfersh-%d.tar.gz", uint16(time.Now().UnixNano()))

	w.Header().Set("Content-Type", "application/x-gzip")
	commonHeader(w, tarfilename)

	// tar inside gzip; both writers must be closed (deferred CloseCheck)
	// to flush their trailers.
	gw := gzip.NewWriter(w)
	defer storage.CloseCheck(gw)

	zw := tar.NewWriter(gw)
	defer storage.CloseCheck(zw)

	for _, key := range strings.Split(files, ",") {
		key = resolveKey(key, s.proxyPath)

		// NOTE(review): assumes key contains a "/"; [1] would panic otherwise.
		token := strings.Split(key, "/")[0]
		filename := sanitize(strings.Split(key, "/")[1])

		if _, err := s.checkMetadata(r.Context(), token, filename, true); err != nil {
			s.logger.Printf("Error metadata: %s", err.Error())
			continue
		}

		reader, contentLength, err := s.storage.Get(r.Context(), token, filename, nil)
		// NOTE(review): deferred inside the loop — readers stay open until
		// the handler returns.
		defer storage.CloseCheck(reader)

		if err != nil {
			if s.storage.IsNotExist(err) {
				http.Error(w, "File not found", 404)
				return
			}

			// NOTE(review): after bytes have been written, http.Error cannot
			// change the status; the text lands in the stream.
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Could not retrieve file.", http.StatusInternalServerError)
			return
		}

		// tar requires the exact size up front.
		header := &tar.Header{
			Name: strings.Split(key, "/")[1],
			Size: int64(contentLength),
		}

		err = zw.WriteHeader(header)
		if err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", http.StatusInternalServerError)
			return
		}

		if _, err = io.Copy(zw, reader); err != nil {
			s.logger.Printf("%s", err.Error())
			http.Error(w, "Internal server error.", http.StatusInternalServerError)
			return
		}
	}
}
2017-03-22 18:09:21 +01:00
|
|
|
func (s *Server) tarHandler(w http.ResponseWriter, r *http.Request) {
|
2014-10-16 20:01:43 +02:00
|
|
|
vars := mux.Vars(r)
|
|
|
|
|
|
|
|
files := vars["files"]
|
|
|
|
|
2014-10-20 13:38:40 +02:00
|
|
|
tarfilename := fmt.Sprintf("transfersh-%d.tar", uint16(time.Now().UnixNano()))
|
2014-10-16 20:01:43 +02:00
|
|
|
|
|
|
|
w.Header().Set("Content-Type", "application/x-tar")
|
2022-04-10 12:13:06 +02:00
|
|
|
commonHeader(w, tarfilename)
|
2014-10-16 20:01:43 +02:00
|
|
|
|
|
|
|
zw := tar.NewWriter(w)
|
2023-03-10 17:41:43 +01:00
|
|
|
defer storage.CloseCheck(zw)
|
2014-10-16 20:01:43 +02:00
|
|
|
|
|
|
|
for _, key := range strings.Split(files, ",") {
|
2019-03-30 12:35:57 +01:00
|
|
|
key = resolveKey(key, s.proxyPath)
|
|
|
|
|
2014-10-20 14:54:42 +02:00
|
|
|
token := strings.Split(key, "/")[0]
|
|
|
|
filename := strings.Split(key, "/")[1]
|
2014-10-20 13:38:40 +02:00
|
|
|
|
2021-12-26 17:17:28 +01:00
|
|
|
if _, err := s.checkMetadata(r.Context(), token, filename, true); err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("Error metadata: %s", err.Error())
|
2017-03-28 16:12:31 +02:00
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2023-03-10 17:41:43 +01:00
|
|
|
reader, contentLength, err := s.storage.Get(r.Context(), token, filename, nil)
|
|
|
|
defer storage.CloseCheck(reader)
|
2021-12-26 14:37:22 +01:00
|
|
|
|
2014-10-16 20:01:43 +02:00
|
|
|
if err != nil {
|
2017-03-22 22:27:26 +01:00
|
|
|
if s.storage.IsNotExist(err) {
|
2014-10-16 20:01:43 +02:00
|
|
|
http.Error(w, "File not found", 404)
|
|
|
|
return
|
|
|
|
}
|
2021-08-19 22:45:30 +02:00
|
|
|
|
|
|
|
s.logger.Printf("%s", err.Error())
|
2022-01-09 22:14:10 +01:00
|
|
|
http.Error(w, "Could not retrieve file.", http.StatusInternalServerError)
|
2021-08-19 22:45:30 +02:00
|
|
|
return
|
2014-10-16 20:01:43 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
header := &tar.Header{
|
|
|
|
Name: strings.Split(key, "/")[1],
|
|
|
|
Size: int64(contentLength),
|
|
|
|
}
|
|
|
|
|
|
|
|
err = zw.WriteHeader(header)
|
|
|
|
if err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("%s", err.Error())
|
2022-01-09 22:14:10 +01:00
|
|
|
http.Error(w, "Internal server error.", http.StatusInternalServerError)
|
2014-10-16 20:01:43 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2014-10-20 13:38:40 +02:00
|
|
|
if _, err = io.Copy(zw, reader); err != nil {
|
2021-07-07 21:03:54 +02:00
|
|
|
s.logger.Printf("%s", err.Error())
|
2022-01-09 22:14:10 +01:00
|
|
|
http.Error(w, "Internal server error.", http.StatusInternalServerError)
|
2014-10-16 20:01:43 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
// headHandler answers HEAD requests for an upload: it validates the metadata
// (without consuming a download) and reports content type/length plus the
// remaining-limit headers, without sending a body.
func (s *Server) headHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	token := vars["token"]
	filename := vars["filename"]

	// increaseDownload=false: a HEAD must not burn a download credit.
	metadata, err := s.checkMetadata(r.Context(), token, filename, false)

	if err != nil {
		s.logger.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	contentType := metadata.ContentType
	contentLength, err := s.storage.Head(r.Context(), token, filename)
	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Could not retrieve file.", http.StatusInternalServerError)
		return
	}

	remainingDownloads, remainingDays := metadata.remainingLimitHeaderValues()

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Content-Length", strconv.FormatUint(contentLength, 10))
	w.Header().Set("Connection", "close")
	w.Header().Set("X-Remaining-Downloads", remainingDownloads)
	w.Header().Set("X-Remaining-Days", remainingDays)
	// Responses differ per Range/Referer/password — keep caches honest.
	w.Header().Set("Vary", "Range, Referer, X-Decrypt-Password")

	// Advertise byte-range support only when the backend can honor it.
	if s.storage.IsRangeSupported() {
		w.Header().Set("Accept-Ranges", "bytes")
	}
}
// getHandler serves a stored file. It consumes one download credit, honors
// an optional Range request, optionally decrypts the payload when the client
// supplies X-Decrypt-Password, and sanitizes inline-rendered content that
// could carry XSS.
func (s *Server) getHandler(w http.ResponseWriter, r *http.Request) {
	vars := mux.Vars(r)

	// "inline" renders in the browser; anything else downloads as attachment.
	action := vars["action"]
	token := vars["token"]
	filename := vars["filename"]

	// increaseDownload=true: a GET counts against MaxDownloads.
	metadata, err := s.checkMetadata(r.Context(), token, filename, true)

	if err != nil {
		s.logger.Printf("Error metadata: %s", err.Error())
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	var rng *storage.Range
	if r.Header.Get("Range") != "" {
		rng = storage.ParseRange(r.Header.Get("Range"))
	}

	contentType := metadata.ContentType
	reader, contentLength, err := s.storage.Get(r.Context(), token, filename, rng)
	// NOTE(review): deferred before the error check — reader may be nil;
	// presumably CloseCheck tolerates that.
	defer storage.CloseCheck(reader)

	if s.storage.IsNotExist(err) {
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	} else if err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Could not retrieve file.", http.StatusInternalServerError)
		return
	}
	if rng != nil {
		cr := rng.ContentRange()
		if cr != "" {
			w.Header().Set("Accept-Ranges", "bytes")
			w.Header().Set("Content-Range", cr)
			// Clamp the stream to the requested window when a limit is set.
			if rng.Limit > 0 {
				reader = io.NopCloser(io.LimitReader(reader, int64(rng.Limit)))
			}
		}
	}

	var disposition string
	if action == "inline" {
		disposition = "inline"
		/*
			metadata.ContentType is unable to determine the type of the content,
			So add text/plain in this case to fix XSS related issues/
		*/
		if strings.TrimSpace(contentType) == "" {
			contentType = "text/plain; charset=utf-8"
		}
	} else {
		disposition = "attachment"
	}

	remainingDownloads, remainingDays := metadata.remainingLimitHeaderValues()

	w.Header().Set("Content-Disposition", fmt.Sprintf(`%s; filename="%s"`, disposition, filename))
	w.Header().Set("Connection", "keep-alive")
	w.Header().Set("Cache-Control", "no-store")
	w.Header().Set("X-Remaining-Downloads", remainingDownloads)
	w.Header().Set("X-Remaining-Days", remainingDays)

	// Wrap the stream in a decryption reader when a password is supplied.
	password := r.Header.Get("X-Decrypt-Password")
	reader, err = attachDecryptionReader(reader, password)
	if err != nil {
		http.Error(w, "Could not decrypt file", http.StatusInternalServerError)
		return
	}

	// For encrypted uploads the stored type/length describe the ciphertext;
	// report the plaintext values instead once decryption is in effect.
	if metadata.Encrypted && len(password) > 0 {
		contentType = metadata.DecryptedContentType
		contentLength = uint64(metadata.ContentLength)
	}

	w.Header().Set("Content-Type", contentType)
	w.Header().Set("Content-Length", strconv.FormatUint(contentLength, 10))
	w.Header().Set("Vary", "Range, Referer, X-Decrypt-Password")

	// 206 only when a Range was actually honored.
	if rng != nil && rng.ContentRange() != "" {
		w.WriteHeader(http.StatusPartialContent)
	}

	// Strip active content from inline-rendered types that can carry XSS.
	// NOTE(review): sanitizing may change the byte count, making the
	// Content-Length set above inaccurate — confirm acceptable.
	if disposition == "inline" && canContainsXSS(contentType) {
		reader = io.NopCloser(bluemonday.UGCPolicy().SanitizeReader(reader))
	}

	if _, err = io.Copy(w, reader); err != nil {
		s.logger.Printf("%s", err.Error())
		http.Error(w, "Error occurred copying to output stream", http.StatusInternalServerError)
		return
	}
}
2022-04-10 12:13:06 +02:00
|
|
|
func commonHeader(w http.ResponseWriter, filename string) {
|
|
|
|
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", filename))
|
|
|
|
w.Header().Set("Connection", "close")
|
|
|
|
w.Header().Set("Cache-Control", "no-store")
|
|
|
|
}
|
|
|
|
|
// RedirectHandler handles redirect: when forceHTTPS is enabled it issues a
// permanent redirect to the https equivalent of the request URL, except for
// the health-check path, .onion hosts (which cannot hold a valid certificate),
// and requests that are already https (directly or via a proxy).
func (s *Server) RedirectHandler(h http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// The empty branches below deliberately fall through to h.ServeHTTP.
		if !s.forceHTTPS {
			// we don't want to enforce https
		} else if r.URL.Path == "/health.html" {
			// health check url won't redirect
		} else if strings.HasSuffix(ipAddrFromRemoteAddr(r.Host), ".onion") {
			// .onion addresses cannot get a valid certificate, so don't redirect
		} else if r.Header.Get("X-Forwarded-Proto") == "https" {
			// already https behind a reverse proxy
		} else if r.TLS != nil {
			// already a direct TLS connection
		} else {
			u := getURL(r, s.proxyPort)
			u.Scheme = "https"
			// Without an explicit proxy port, point the redirect at the TLS
			// listener's port (omitting it when it is the default 443).
			if len(s.proxyPort) == 0 && len(s.TLSListenerString) > 0 {
				_, port, err := net.SplitHostPort(s.TLSListenerString)
				if err != nil || port == "443" {
					port = ""
				}

				if len(port) > 0 {
					u.Host = net.JoinHostPort(u.Hostname(), port)
				} else {
					u.Host = u.Hostname()
				}
			}

			http.Redirect(w, r, u.String(), http.StatusPermanentRedirect)
			return
		}

		h.ServeHTTP(w, r)
	}
}
2021-08-19 22:45:30 +02:00
|
|
|
// LoveHandler Create a log handler for every request it receives.
|
2014-10-16 20:01:43 +02:00
|
|
|
func LoveHandler(h http.Handler) http.HandlerFunc {
|
|
|
|
return func(w http.ResponseWriter, r *http.Request) {
|
|
|
|
w.Header().Set("x-made-with", "<3 by DutchCoders")
|
|
|
|
w.Header().Set("x-served-by", "Proudly served by DutchCoders")
|
2021-08-19 22:45:30 +02:00
|
|
|
w.Header().Set("server", "Transfer.sh HTTP Server")
|
2014-10-16 20:01:43 +02:00
|
|
|
h.ServeHTTP(w, r)
|
|
|
|
}
|
|
|
|
}
|
2018-06-23 18:46:28 +02:00
|
|
|
|
2021-08-19 22:45:30 +02:00
|
|
|
func ipFilterHandler(h http.Handler, ipFilterOptions *IPFilterOptions) http.HandlerFunc {
|
2019-05-11 14:42:59 +02:00
|
|
|
return func(w http.ResponseWriter, r *http.Request) {
|
|
|
|
if ipFilterOptions == nil {
|
|
|
|
h.ServeHTTP(w, r)
|
|
|
|
} else {
|
2023-03-12 05:34:41 +01:00
|
|
|
WrapIPFilter(h, ipFilterOptions).ServeHTTP(w, r)
|
2019-05-11 14:42:59 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-19 22:45:30 +02:00
|
|
|
func (s *Server) basicAuthHandler(h http.Handler) http.HandlerFunc {
|
2018-06-23 18:46:28 +02:00
|
|
|
return func(w http.ResponseWriter, r *http.Request) {
|
2023-03-12 05:34:41 +01:00
|
|
|
if s.authUser == "" || s.authPass == "" || s.authHtpasswd == "" {
|
2018-06-23 18:46:28 +02:00
|
|
|
h.ServeHTTP(w, r)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-03-12 05:34:41 +01:00
|
|
|
if s.htpasswdFile == nil && s.authHtpasswd != "" {
|
|
|
|
htpasswdFile, err := htpasswd.New(s.authHtpasswd, htpasswd.DefaultSystems, nil)
|
2023-03-12 03:52:45 +01:00
|
|
|
if err != nil {
|
|
|
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
s.htpasswdFile = htpasswdFile
|
|
|
|
}
|
|
|
|
|
2023-03-12 05:34:41 +01:00
|
|
|
if s.authIPFilter == nil && s.authIPFilterOptions != nil {
|
|
|
|
s.authIPFilter = newIPFilter(s.authIPFilterOptions)
|
|
|
|
}
|
|
|
|
|
2018-06-23 18:46:28 +02:00
|
|
|
w.Header().Set("WWW-Authenticate", "Basic realm=\"Restricted\"")
|
|
|
|
|
2023-03-12 05:34:41 +01:00
|
|
|
var authorized bool
|
|
|
|
if s.authIPFilter != nil {
|
|
|
|
remoteIP := realip.FromRequest(r)
|
|
|
|
authorized = s.authIPFilter.Allowed(remoteIP)
|
|
|
|
}
|
|
|
|
|
2018-06-23 18:46:28 +02:00
|
|
|
username, password, authOK := r.BasicAuth()
|
2023-03-12 05:34:41 +01:00
|
|
|
if !authOK && !authorized {
|
2022-01-09 22:14:10 +01:00
|
|
|
http.Error(w, "Not authorized", http.StatusUnauthorized)
|
2018-06-23 18:46:28 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2023-03-12 05:34:41 +01:00
|
|
|
if !authorized && username == s.authUser && password == s.authPass {
|
2023-03-12 03:52:45 +01:00
|
|
|
authorized = true
|
|
|
|
}
|
|
|
|
|
2023-03-12 05:34:41 +01:00
|
|
|
if !authorized && s.htpasswdFile != nil {
|
2023-03-12 03:52:45 +01:00
|
|
|
authorized = s.htpasswdFile.Match(username, password)
|
|
|
|
}
|
|
|
|
|
|
|
|
if !authorized {
|
2022-01-09 22:14:10 +01:00
|
|
|
http.Error(w, "Not authorized", http.StatusUnauthorized)
|
2018-06-23 18:46:28 +02:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
h.ServeHTTP(w, r)
|
|
|
|
}
|
|
|
|
}
|