mirror of https://github.com/mickael-kerjean/filestash (synced 2025-12-06 08:22:24 +01:00)

Commit f7a4e52703 (parent a0b2ca72ed): maintain (go): go format

69 changed files with 2317 additions and 3221 deletions
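The commit message and every hunk below point to a formatting-only pass (operator spacing, const and struct-tag alignment, import ordering), with no behavioural change. As an illustrative aside rather than part of the commit, a tree can be checked for gofmt-cleanliness with the standard library's go/format package; the small program below mirrors what `gofmt -l .` prints and is an assumption about tooling, not something taken from this repository.

    // gofmt_check.go - illustrative sketch only (not part of this commit):
    // list the .go files under a directory that gofmt would rewrite.
    package main

    import (
    	"bytes"
    	"fmt"
    	"go/format"
    	"os"
    	"path/filepath"
    	"strings"
    )

    func main() {
    	root := "."
    	if len(os.Args) > 1 {
    		root = os.Args[1]
    	}
    	err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
    		if err != nil || info.IsDir() || !strings.HasSuffix(path, ".go") {
    			return nil
    		}
    		src, readErr := os.ReadFile(path)
    		if readErr != nil {
    			return nil
    		}
    		// format.Source applies the same rules gofmt does (spacing, alignment, ...).
    		formatted, fmtErr := format.Source(src)
    		if fmtErr == nil && !bytes.Equal(src, formatted) {
    			fmt.Println(path) // this file is not gofmt-clean
    		}
    		return nil
    	})
    	if err != nil {
    		fmt.Fprintln(os.Stderr, err)
    	}
    }

Run against the parent commit a0b2ca72ed it should list the files reformatted here, and against f7a4e52703 it should list none, assuming the pass covered the whole tree.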
@@ -40,7 +40,8 @@ func (d *Driver) Drivers() map[string]IBackend {
 	return d.ds
 }

-type Nothing struct {}
+type Nothing struct{}

 func (b Nothing) Init(params map[string]string, app *App) (IBackend, error) {
 	return &Nothing{}, nil
 }
@@ -69,21 +69,19 @@ func NewQuickCache(arg ...time.Duration) AppCache {
 		}
 	}
 	c := AppCache{}
-	c.Cache = cache.New(retention * time.Second, cleanup * time.Second)
+	c.Cache = cache.New(retention*time.Second, cleanup*time.Second)
 	return c
 }

 // ============================================================================

 type KeyValueStore struct {
 	cache map[string]interface{}
 	sync.RWMutex
 }

 func NewKeyValueStore() KeyValueStore {
-	return KeyValueStore{ cache: make(map[string]interface{}) }
+	return KeyValueStore{cache: make(map[string]interface{})}
 }

 func (this *KeyValueStore) Get(key string) interface{} {
@@ -6,24 +6,24 @@ import (
 )

 const (
 	APP_VERSION       = "v0.5"
 	LOG_PATH          = "data/state/log/"
 	CONFIG_PATH       = "data/state/config/"
 	DB_PATH           = "data/state/db/"
 	FTS_PATH          = "data/state/search/"
 	CERT_PATH         = "data/state/certs/"
 	TMP_PATH          = "data/cache/tmp/"
 	COOKIE_NAME_AUTH  = "auth"
 	COOKIE_NAME_PROOF = "proof"
 	COOKIE_NAME_ADMIN = "admin"
 	COOKIE_PATH_ADMIN = "/admin/api/"
 	COOKIE_PATH       = "/api/"
 	FILE_INDEX        = "./data/public/index.html"
 	FILE_ASSETS       = "./data/public/"
 	URL_SETUP         = "/admin/setup"
 )

-func init(){
+func init() {
 	os.MkdirAll(filepath.Join(GetCurrentDir(), LOG_PATH), os.ModePerm)
 	os.MkdirAll(filepath.Join(GetCurrentDir(), FTS_PATH), os.ModePerm)
 	os.MkdirAll(filepath.Join(GetCurrentDir(), CONFIG_PATH), os.ModePerm)

@@ -31,11 +31,10 @@ func init(){
 	os.MkdirAll(filepath.Join(GetCurrentDir(), TMP_PATH), os.ModePerm)
 }
-

 var (
 	BUILD_REF                     string
 	BUILD_DATE                    string
 	SECRET_KEY                    string
 	SECRET_KEY_DERIVATE_FOR_PROOF string
 	SECRET_KEY_DERIVATE_FOR_ADMIN string
 	SECRET_KEY_DERIVATE_FOR_USER  string

@@ -48,8 +47,8 @@ var (
 */
 func InitSecretDerivate(secret string) {
 	SECRET_KEY = secret
-	SECRET_KEY_DERIVATE_FOR_PROOF = Hash("PROOF_" + SECRET_KEY, len(SECRET_KEY))
-	SECRET_KEY_DERIVATE_FOR_ADMIN = Hash("ADMIN_" + SECRET_KEY, len(SECRET_KEY))
-	SECRET_KEY_DERIVATE_FOR_USER = Hash("USER_" + SECRET_KEY, len(SECRET_KEY))
-	SECRET_KEY_DERIVATE_FOR_HASH = Hash("HASH_" + SECRET_KEY, len(SECRET_KEY))
+	SECRET_KEY_DERIVATE_FOR_PROOF = Hash("PROOF_"+SECRET_KEY, len(SECRET_KEY))
+	SECRET_KEY_DERIVATE_FOR_ADMIN = Hash("ADMIN_"+SECRET_KEY, len(SECRET_KEY))
+	SECRET_KEY_DERIVATE_FOR_USER = Hash("USER_"+SECRET_KEY, len(SECRET_KEY))
+	SECRET_KEY_DERIVATE_FOR_HASH = Hash("HASH_"+SECRET_KEY, len(SECRET_KEY))
 }
@@ -12,8 +12,8 @@ import (
 	"hash/fnv"
 	"io"
 	"io/ioutil"
-	mathrand "math/rand"
 	"math/big"
+	mathrand "math/rand"
 	"os"
 	"runtime"
 )

@@ -32,7 +32,7 @@ func EncryptString(secret string, data string) (string, error) {
 	return base64.URLEncoding.EncodeToString(d), nil
 }

-func DecryptString(secret string, data string) (string, error){
+func DecryptString(secret string, data string) (string, error) {
 	d, err := base64.URLEncoding.DecodeString(data)
 	if err != nil {
 		return "", err

@@ -74,7 +74,7 @@ func HashStream(r io.Reader, n int) string {

 func hashSize(b []byte, n int) string {
 	h := ""
-	for i:=0; i<len(b); i++ {
+	for i := 0; i < len(b); i++ {
 		if n > 0 && len(h) >= n {
 			break
 		}

@@ -82,7 +82,7 @@ func hashSize(b []byte, n int) string {
 	}

 	if len(h) > n {
-		return h[0:len(h) - 1]
+		return h[0 : len(h)-1]
 	}
 	return h
 }

@@ -90,7 +90,7 @@ func hashSize(b []byte, n int) string {
 func ReversedBaseChange(alphabet []rune, i int) string {
 	str := ""
 	for {
-		str += string(alphabet[i % len(alphabet)])
+		str += string(alphabet[i%len(alphabet)])
 		i = i / len(alphabet)
 		if i == 0 {
 			break
@@ -122,42 +122,42 @@ func QuickString(n int) string {
 }

 func encrypt(key []byte, plaintext []byte) ([]byte, error) {
 	c, err := aes.NewCipher(key)
 	if err != nil {
 		return nil, err
 	}

 	gcm, err := cipher.NewGCM(c)
 	if err != nil {
 		return nil, err
 	}

 	nonce := make([]byte, gcm.NonceSize())
 	if _, err = io.ReadFull(rand.Reader, nonce); err != nil {
 		return nil, err
 	}

 	return gcm.Seal(nonce, nonce, plaintext, nil), nil
 }

 func decrypt(key []byte, ciphertext []byte) ([]byte, error) {
 	c, err := aes.NewCipher(key)
 	if err != nil {
 		return nil, err
 	}

 	gcm, err := cipher.NewGCM(c)
 	if err != nil {
 		return nil, err
 	}

 	nonceSize := gcm.NonceSize()
 	if len(ciphertext) < nonceSize {
 		return nil, NewError("ciphertext too short", 500)
 	}

 	nonce, ciphertext := ciphertext[:nonceSize], ciphertext[nonceSize:]
 	return gcm.Open(nil, nonce, ciphertext, nil)
 }

 func compress(something []byte) ([]byte, error) {

@@ -243,7 +243,7 @@ func GenerateMachineID() string {
 			if _, err = f.Read(b); err == nil {
 				return string(b)
 			}
 		}
 	}
 	return "na"
 }
@@ -1,8 +1,8 @@
 package common

 import (
-	"runtime"
 	"fmt"
+	"runtime"
 )

 func PrintMemUsage() {

@@ -17,5 +17,5 @@ func PrintMemUsage() {
 }

 func bToMb(b uint64) uint64 {
 	return b / 1024 / 1024
 }
@@ -3,8 +3,8 @@ package common
 import (
 	"crypto/tls"
 	"fmt"
-	"net/http"
 	"net"
+	"net/http"
 	"time"
 )

@@ -54,11 +54,13 @@ var DefaultTLSConfig = tls.Config{
 }

 func NewTransormedTransport(transport http.Transport) http.RoundTripper {
-	return &TransformedTransport{ &transport }
+	return &TransformedTransport{&transport}
 }

 type TransformedTransport struct {
 	Orig http.RoundTripper
 }

 func (this *TransformedTransport) RoundTrip(req *http.Request) (*http.Response, error) {
 	req.Header.Add("User-Agent", USER_AGENT)
 	return this.Orig.RoundTrip(req)
@@ -8,9 +8,11 @@ import (
 func NewNilLogger() *slog.Logger {
 	return slog.New(dummyWriter{}, "", slog.LstdFlags)
 }

 type dummyWriter struct {
 	io.Writer
 }
-func(this dummyWriter) Write(p []byte) (n int, err error) {
+
+func (this dummyWriter) Write(p []byte) (n int, err error) {
 	return len(p), nil
 }
@@ -12,21 +12,21 @@ func NewError(message string, status int) error {
 }

 var (
 	ErrNotFound             error = NewError("Not Found", 404)
 	ErrNotAllowed           error = NewError("Not Allowed", 403)
 	ErrPermissionDenied     error = NewError("Permission Denied", 403)
 	ErrNotValid             error = NewError("Not Valid", 405)
 	ErrConflict             error = NewError("Already exist", 409)
 	ErrNotReachable         error = NewError("Cannot establish a connection", 502)
 	ErrInvalidPassword            = NewError("Invalid Password", 403)
 	ErrNotImplemented             = NewError("Not Implemented", 501)
 	ErrNotSupported               = NewError("Not supported", 501)
 	ErrFilesystemError            = NewError("Can't use filesystem", 503)
 	ErrMissingDependency          = NewError("Missing dependency", 424)
 	ErrNotAuthorized              = NewError("Not authorised", 401)
 	ErrAuthenticationFailed       = NewError("Invalid account", 400)
 	ErrCongestion                 = NewError("Traffic congestion, try again later", 500)
 	ErrTimeout                    = NewError("Timeout", 500)
 )

 type AppError struct {
@@ -24,7 +24,7 @@ func IsDirectory(path string) bool {
 	if path == "" {
 		return false
 	}
-	if path[len(path) - 1:] != "/" {
+	if path[len(path)-1:] != "/" {
 		return false
 	}
 	return true
@ -1,17 +1,17 @@
|
|||
package common
|
||||
|
||||
import (
|
||||
slog "log"
|
||||
"fmt"
|
||||
"time"
|
||||
slog "log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
var logfile *os.File
|
||||
|
||||
func init(){
|
||||
func init() {
|
||||
var err error
|
||||
logPath := filepath.Join(GetCurrentDir(), LOG_PATH)
|
||||
logfile, err = os.OpenFile(filepath.Join(logPath, "access.log"), os.O_APPEND|os.O_WRONLY|os.O_CREATE, os.ModePerm)
|
||||
|
|
@ -22,7 +22,7 @@ func init(){
|
|||
logfile.WriteString("")
|
||||
}
|
||||
|
||||
type log struct{
|
||||
type log struct {
|
||||
enable bool
|
||||
debug bool
|
||||
info bool
|
||||
|
|
@ -33,7 +33,7 @@ type log struct{
|
|||
func (l *log) Info(format string, v ...interface{}) {
|
||||
if l.info && l.enable {
|
||||
message := fmt.Sprintf("%s INFO ", l.now())
|
||||
message = fmt.Sprintf(message + format + "\n", v...)
|
||||
message = fmt.Sprintf(message+format+"\n", v...)
|
||||
|
||||
logfile.WriteString(message)
|
||||
fmt.Printf(strings.Replace(message, "%", "%%", -1))
|
||||
|
|
@ -43,7 +43,7 @@ func (l *log) Info(format string, v ...interface{}) {
|
|||
func (l *log) Warning(format string, v ...interface{}) {
|
||||
if l.warn && l.enable {
|
||||
message := fmt.Sprintf("%s WARN ", l.now())
|
||||
message = fmt.Sprintf(message + format + "\n", v...)
|
||||
message = fmt.Sprintf(message+format+"\n", v...)
|
||||
|
||||
logfile.WriteString(message)
|
||||
fmt.Printf(strings.Replace(message, "%", "%%", -1))
|
||||
|
|
@ -53,7 +53,7 @@ func (l *log) Warning(format string, v ...interface{}) {
|
|||
func (l *log) Error(format string, v ...interface{}) {
|
||||
if l.error && l.enable {
|
||||
message := fmt.Sprintf("%s ERROR ", l.now())
|
||||
message = fmt.Sprintf(message + format + "\n", v...)
|
||||
message = fmt.Sprintf(message+format+"\n", v...)
|
||||
|
||||
logfile.WriteString(message)
|
||||
fmt.Printf(strings.Replace(message, "%", "%%", -1))
|
||||
|
|
@ -63,7 +63,7 @@ func (l *log) Error(format string, v ...interface{}) {
|
|||
func (l *log) Debug(format string, v ...interface{}) {
|
||||
if l.debug && l.enable {
|
||||
message := fmt.Sprintf("%s DEBUG ", l.now())
|
||||
message = fmt.Sprintf(message + format + "\n", v...)
|
||||
message = fmt.Sprintf(message+format+"\n", v...)
|
||||
|
||||
logfile.WriteString(message)
|
||||
fmt.Printf(strings.Replace(message, "%", "%%", -1))
|
||||
|
|
@ -72,7 +72,7 @@ func (l *log) Debug(format string, v ...interface{}) {
|
|||
|
||||
func (l *log) Stdout(format string, v ...interface{}) {
|
||||
message := fmt.Sprintf("%s ", l.now())
|
||||
message = fmt.Sprintf(message + format + "\n", v...)
|
||||
message = fmt.Sprintf(message+format+"\n", v...)
|
||||
fmt.Printf(strings.Replace(message, "%", "%%", -1))
|
||||
}
|
||||
|
||||
|
|
@ -114,11 +114,11 @@ func (l *log) SetVisibility(str string) {
|
|||
}
|
||||
}
|
||||
|
||||
func(l *log) Enable(val bool) {
|
||||
func (l *log) Enable(val bool) {
|
||||
l.enable = val
|
||||
}
|
||||
|
||||
var Log = func () log {
|
||||
var Log = func() log {
|
||||
l := log{}
|
||||
l.Enable(true)
|
||||
return l
|
||||
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
package common
|
||||
|
||||
import (
|
||||
"github.com/gorilla/mux"
|
||||
"io"
|
||||
"net/http"
|
||||
"github.com/gorilla/mux"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
@ -16,19 +16,19 @@ type Plugin struct {
|
|||
Enable bool
|
||||
}
|
||||
|
||||
|
||||
type Register struct{}
|
||||
type Get struct{}
|
||||
|
||||
var Hooks = struct {
|
||||
Get Get
|
||||
Get Get
|
||||
Register Register
|
||||
}{
|
||||
Get: Get{},
|
||||
Get: Get{},
|
||||
Register: Register{},
|
||||
}
|
||||
|
||||
var process_file_content_before_send []func(io.ReadCloser, *App, *http.ResponseWriter, *http.Request) (io.ReadCloser, error)
|
||||
|
||||
func (this Register) ProcessFileContentBeforeSend(fn func(io.ReadCloser, *App, *http.ResponseWriter, *http.Request) (io.ReadCloser, error)) {
|
||||
process_file_content_before_send = append(process_file_content_before_send, fn)
|
||||
}
|
||||
|
|
@ -37,6 +37,7 @@ func (this Get) ProcessFileContentBeforeSend() []func(io.ReadCloser, *App, *http
|
|||
}
|
||||
|
||||
var http_endpoint []func(*mux.Router, *App) error
|
||||
|
||||
func (this Register) HttpEndpoint(fn func(*mux.Router, *App) error) {
|
||||
http_endpoint = append(http_endpoint, fn)
|
||||
}
|
||||
|
|
@ -45,6 +46,7 @@ func (this Get) HttpEndpoint() []func(*mux.Router, *App) error {
|
|||
}
|
||||
|
||||
var starter_process []func(*mux.Router)
|
||||
|
||||
func (this Register) Starter(fn func(*mux.Router)) {
|
||||
starter_process = append(starter_process, fn)
|
||||
}
|
||||
|
|
@ -52,12 +54,12 @@ func (this Get) Starter() []func(*mux.Router) {
|
|||
return starter_process
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* UI Overrides
|
||||
* They are the means by which server plugin change the frontend behaviors.
|
||||
*/
|
||||
var overrides []string
|
||||
|
||||
func (this Register) FrontendOverrides(url string) {
|
||||
overrides = append(overrides, url)
|
||||
}
|
||||
|
|
@ -66,6 +68,7 @@ func (this Get) FrontendOverrides() []string {
|
|||
}
|
||||
|
||||
var xdg_open []string
|
||||
|
||||
func (this Register) XDGOpen(jsString string) {
|
||||
xdg_open = append(xdg_open, jsString)
|
||||
}
|
||||
|
|
@ -74,6 +77,7 @@ func (this Get) XDGOpen() []string {
|
|||
}
|
||||
|
||||
const OverrideVideoSourceMapper = "/overrides/video-transcoder.js"
|
||||
|
||||
func init() {
|
||||
Hooks.Register.FrontendOverrides(OverrideVideoSourceMapper)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ func SendSuccessResultWithEtagAndGzip(res http.ResponseWriter, req *http.Request
|
|||
if strings.Contains(req.Header.Get("Accept-Encoding"), "gzip") == true {
|
||||
mode = "gzip"
|
||||
}
|
||||
hash := QuickHash(mode + string(dataToSend), 20)
|
||||
hash := QuickHash(mode+string(dataToSend), 20)
|
||||
if req.Header.Get("If-None-Match") == hash {
|
||||
res.WriteHeader(http.StatusNotModified)
|
||||
return
|
||||
|
|
@ -60,7 +60,6 @@ func SendSuccessResultWithEtagAndGzip(res http.ResponseWriter, req *http.Request
|
|||
res.Write(dataToSend)
|
||||
}
|
||||
|
||||
|
||||
func SendSuccessResults(res http.ResponseWriter, data interface{}) {
|
||||
encoder := json.NewEncoder(res)
|
||||
encoder.SetEscapeHTML(false)
|
||||
|
|
|
|||
|
|
@ -30,7 +30,7 @@ func generateNewCertificate(root *x509.Certificate, key *rsa.PrivateKey) (*x509.
|
|||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
cert, err := x509.ParseCertificate(certDER);
|
||||
cert, err := x509.ParseCertificate(certDER)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
|
@ -58,7 +58,7 @@ func pullCertificateFromFS() (*x509.Certificate, []byte, error) {
|
|||
}
|
||||
|
||||
func saveCertificateToFS(certPEM []byte) error {
|
||||
file, err := os.OpenFile(certPEMPath, os.O_WRONLY | os.O_CREATE, 0600)
|
||||
file, err := os.OpenFile(certPEMPath, os.O_WRONLY|os.O_CREATE, 0600)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -8,10 +8,10 @@ import (
|
|||
)
|
||||
|
||||
func GenerateSelfSigned() (tls.Certificate, *x509.CertPool, error) {
|
||||
var err error
|
||||
var key *rsa.PrivateKey
|
||||
var root *x509.Certificate
|
||||
var keyPEM []byte
|
||||
var err error
|
||||
var key *rsa.PrivateKey
|
||||
var root *x509.Certificate
|
||||
var keyPEM []byte
|
||||
var certPEM []byte
|
||||
var TLSCert tls.Certificate
|
||||
|
||||
|
|
|
|||
|
|
@ -2,11 +2,11 @@ package ssl
|
|||
|
||||
import (
|
||||
. "github.com/mickael-kerjean/filestash/server/common"
|
||||
"path/filepath"
|
||||
"os"
|
||||
"path/filepath"
|
||||
)
|
||||
|
||||
var keyPEMPath string = filepath.Join(GetCurrentDir(), CERT_PATH, "key.pem")
|
||||
var keyPEMPath string = filepath.Join(GetCurrentDir(), CERT_PATH, "key.pem")
|
||||
var certPEMPath string = filepath.Join(GetCurrentDir(), CERT_PATH, "cert.pem")
|
||||
|
||||
func init() {
|
||||
|
|
|
|||
|
|
@ -31,7 +31,7 @@ func generateNewPrivateKey() (*rsa.PrivateKey, []byte, error) {
|
|||
return nil, nil, err
|
||||
}
|
||||
return key, pem.EncodeToMemory(&pem.Block{
|
||||
Type: "RSA PRIVATE KEY",
|
||||
Type: "RSA PRIVATE KEY",
|
||||
Bytes: x509.MarshalPKCS1PrivateKey(key),
|
||||
}), nil
|
||||
}
|
||||
|
|
@ -56,7 +56,7 @@ func pullPrivateKeyFromFS() (*rsa.PrivateKey, []byte, error) {
|
|||
}
|
||||
|
||||
func savePrivateKeyToFS(privatePEM []byte) error {
|
||||
file, err := os.OpenFile(keyPEMPath, os.O_WRONLY | os.O_CREATE, 0600)
|
||||
file, err := os.OpenFile(keyPEMPath, os.O_WRONLY|os.O_CREATE, 0600)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,22 +20,22 @@ func GetRoot() (*x509.Certificate, error) {
|
|||
Subject: pkix.Name{
|
||||
Organization: []string{"Filestash"},
|
||||
},
|
||||
NotBefore: time.Now().Add(- 24 * time.Hour),
|
||||
NotBefore: time.Now().Add(-24 * time.Hour),
|
||||
NotAfter: time.Now().Add(24 * 365 * 100 * time.Hour),
|
||||
KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature | x509.KeyUsageCertSign,
|
||||
ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
|
||||
BasicConstraintsValid: true,
|
||||
IsCA: false,
|
||||
IPAddresses: func() []net.IP {
|
||||
IPAddresses: func() []net.IP {
|
||||
ips := []net.IP{}
|
||||
ifaces, err := net.Interfaces()
|
||||
if err != nil {
|
||||
return []net.IP{ net.ParseIP("127.0.0.1") }
|
||||
return []net.IP{net.ParseIP("127.0.0.1")}
|
||||
}
|
||||
for _, i := range ifaces {
|
||||
addrs, err := i.Addrs()
|
||||
if err != nil {
|
||||
return []net.IP{ net.ParseIP("127.0.0.1") }
|
||||
return []net.IP{net.ParseIP("127.0.0.1")}
|
||||
}
|
||||
for _, addr := range addrs {
|
||||
var ip net.IP
|
||||
|
|
|
|||
|
|
@ -9,13 +9,13 @@ const (
|
|||
)
|
||||
|
||||
type AdminToken struct {
|
||||
Claim string `json:"token"`
|
||||
Expire time.Time `json:"time"`
|
||||
Claim string `json:"token"`
|
||||
Expire time.Time `json:"time"`
|
||||
}
|
||||
|
||||
func NewAdminToken() AdminToken {
|
||||
return AdminToken{
|
||||
Claim: ADMIN_CLAIM,
|
||||
Claim: ADMIN_CLAIM,
|
||||
Expire: time.Now().Add(time.Hour * 24),
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -23,7 +23,7 @@ type File struct {
|
|||
FName string `json:"name"`
|
||||
FType string `json:"type"`
|
||||
FTime int64 `json:"time"`
|
||||
FSize int64 `json:"size"`
|
||||
FSize int64 `json:"size"`
|
||||
FPath string `json:"path,omitempty"`
|
||||
CanRename *bool `json:"can_rename,omitempty"`
|
||||
CanMove *bool `json:"can_move_directory,omitempty"`
|
||||
|
|
@ -75,19 +75,19 @@ type Metadata struct {
|
|||
const PASSWORD_DUMMY = "{{PASSWORD}}"
|
||||
|
||||
type Share struct {
|
||||
Id string `json:"id"`
|
||||
Backend string `json:"-"`
|
||||
Auth string `json:"auth,omitempty"`
|
||||
Path string `json:"path"`
|
||||
Password *string `json:"password,omitempty"`
|
||||
Users *string `json:"users,omitempty"`
|
||||
Expire *int64 `json:"expire,omitempty"`
|
||||
Url *string `json:"url,omitempty"`
|
||||
CanShare bool `json:"can_share"`
|
||||
CanManageOwn bool `json:"can_manage_own"`
|
||||
CanRead bool `json:"can_read"`
|
||||
CanWrite bool `json:"can_write"`
|
||||
CanUpload bool `json:"can_upload"`
|
||||
Id string `json:"id"`
|
||||
Backend string `json:"-"`
|
||||
Auth string `json:"auth,omitempty"`
|
||||
Path string `json:"path"`
|
||||
Password *string `json:"password,omitempty"`
|
||||
Users *string `json:"users,omitempty"`
|
||||
Expire *int64 `json:"expire,omitempty"`
|
||||
Url *string `json:"url,omitempty"`
|
||||
CanShare bool `json:"can_share"`
|
||||
CanManageOwn bool `json:"can_manage_own"`
|
||||
CanRead bool `json:"can_read"`
|
||||
CanWrite bool `json:"can_write"`
|
||||
CanUpload bool `json:"can_upload"`
|
||||
}
|
||||
|
||||
func (s Share) IsValid() error {
|
||||
|
|
@ -106,7 +106,7 @@ func (s *Share) MarshalJSON() ([]byte, error) {
|
|||
s.Backend,
|
||||
"",
|
||||
s.Path,
|
||||
func(pass *string) *string{
|
||||
func(pass *string) *string {
|
||||
if pass != nil {
|
||||
return NewString(PASSWORD_DUMMY)
|
||||
}
|
||||
|
|
@ -123,22 +123,31 @@ func (s *Share) MarshalJSON() ([]byte, error) {
|
|||
}
|
||||
return json.Marshal(p)
|
||||
}
|
||||
func(s *Share) UnmarshallJSON(b []byte) error {
|
||||
func (s *Share) UnmarshallJSON(b []byte) error {
|
||||
var tmp map[string]interface{}
|
||||
if err := json.Unmarshal(b, &tmp); err != nil {
|
||||
return err
|
||||
}
|
||||
for key, value := range tmp {
|
||||
switch key {
|
||||
case "password": s.Password = NewStringpFromInterface(value)
|
||||
case "users": s.Users = NewStringpFromInterface(value)
|
||||
case "expire": s.Expire = NewInt64pFromInterface(value)
|
||||
case "url": s.Url = NewStringpFromInterface(value)
|
||||
case "can_share": s.CanShare = NewBoolFromInterface(value)
|
||||
case "can_manage_own": s.CanManageOwn = NewBoolFromInterface(value)
|
||||
case "can_read": s.CanRead = NewBoolFromInterface(value)
|
||||
case "can_write": s.CanWrite = NewBoolFromInterface(value)
|
||||
case "can_upload": s.CanUpload = NewBoolFromInterface(value)
|
||||
case "password":
|
||||
s.Password = NewStringpFromInterface(value)
|
||||
case "users":
|
||||
s.Users = NewStringpFromInterface(value)
|
||||
case "expire":
|
||||
s.Expire = NewInt64pFromInterface(value)
|
||||
case "url":
|
||||
s.Url = NewStringpFromInterface(value)
|
||||
case "can_share":
|
||||
s.CanShare = NewBoolFromInterface(value)
|
||||
case "can_manage_own":
|
||||
s.CanManageOwn = NewBoolFromInterface(value)
|
||||
case "can_read":
|
||||
s.CanRead = NewBoolFromInterface(value)
|
||||
case "can_write":
|
||||
s.CanWrite = NewBoolFromInterface(value)
|
||||
case "can_upload":
|
||||
s.CanUpload = NewBoolFromInterface(value)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
|
|
|
|||
|
|
@ -24,8 +24,10 @@ func NewInt(t int) *int {
|
|||
|
||||
func NewBoolFromInterface(val interface{}) bool {
|
||||
switch val.(type) {
|
||||
case bool: return val.(bool)
|
||||
default: return false
|
||||
case bool:
|
||||
return val.(bool)
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -37,7 +39,8 @@ func NewInt64pFromInterface(val interface{}) *int64 {
|
|||
case float64:
|
||||
v := int64(val.(float64))
|
||||
return &v
|
||||
default: return nil
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -46,7 +49,8 @@ func NewStringpFromInterface(val interface{}) *string {
|
|||
case string:
|
||||
v := val.(string)
|
||||
return &v
|
||||
default: return nil
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -55,7 +59,8 @@ func NewStringFromInterface(val interface{}) string {
|
|||
case string:
|
||||
v := val.(string)
|
||||
return v
|
||||
default: return ""
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -14,7 +14,7 @@ func AdminSessionGet(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
SendSuccessResult(res, true)
|
||||
return
|
||||
}
|
||||
obfuscate := func() string{
|
||||
obfuscate := func() string {
|
||||
c, err := req.Cookie(COOKIE_NAME_ADMIN)
|
||||
if err != nil {
|
||||
return ""
|
||||
|
|
@ -22,7 +22,7 @@ func AdminSessionGet(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return c.Value
|
||||
}()
|
||||
|
||||
str, err := DecryptString(SECRET_KEY_DERIVATE_FOR_ADMIN, obfuscate);
|
||||
str, err := DecryptString(SECRET_KEY_DERIVATE_FOR_ADMIN, obfuscate)
|
||||
if err != nil {
|
||||
SendSuccessResult(res, false)
|
||||
return
|
||||
|
|
@ -42,7 +42,7 @@ func AdminSessionGet(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
|
||||
func AdminSessionAuthenticate(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
// Step 1: Deliberatly make the request slower to make hacking attempt harder for the attacker
|
||||
time.Sleep(1500*time.Millisecond)
|
||||
time.Sleep(1500 * time.Millisecond)
|
||||
|
||||
// Step 2: Make sure current user has appropriate access
|
||||
admin := Config.Get("auth.admin").String()
|
||||
|
|
@ -66,16 +66,16 @@ func AdminSessionAuthenticate(ctx App, res http.ResponseWriter, req *http.Reques
|
|||
return
|
||||
}
|
||||
http.SetCookie(res, &http.Cookie{
|
||||
Name: COOKIE_NAME_ADMIN,
|
||||
Value: obfuscate,
|
||||
Path: COOKIE_PATH_ADMIN,
|
||||
MaxAge: 60*60, // valid for 1 hour
|
||||
Name: COOKIE_NAME_ADMIN,
|
||||
Value: obfuscate,
|
||||
Path: COOKIE_PATH_ADMIN,
|
||||
MaxAge: 60 * 60, // valid for 1 hour
|
||||
SameSite: http.SameSiteStrictMode,
|
||||
})
|
||||
SendSuccessResult(res, true)
|
||||
}
|
||||
|
||||
func AdminBackend(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
func AdminBackend(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
drivers := Backend.Drivers()
|
||||
backends := make(map[string]Form, len(drivers))
|
||||
for key := range drivers {
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import (
|
|||
)
|
||||
|
||||
var (
|
||||
logpath = filepath.Join(GetCurrentDir(), LOG_PATH, "access.log")
|
||||
logpath = filepath.Join(GetCurrentDir(), LOG_PATH, "access.log")
|
||||
configpath = filepath.Join(GetCurrentDir(), CONFIG_PATH, "config.json")
|
||||
)
|
||||
|
||||
|
|
|
|||
|
|
@ -3,9 +3,9 @@ package ctrl
|
|||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/gorilla/mux"
|
||||
. "github.com/mickael-kerjean/filestash/server/common"
|
||||
"github.com/mickael-kerjean/filestash/server/model"
|
||||
"github.com/gorilla/mux"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
|
|
@ -17,7 +17,6 @@ import (
|
|||
//go:generate sh -c "go run ../generator/emacs-el.go > export_generated.go && go fmt export_generated.go"
|
||||
var EmacsElConfig string = ""
|
||||
|
||||
|
||||
func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
http.SetCookie(res, &http.Cookie{
|
||||
Name: "download",
|
||||
|
|
@ -38,10 +37,10 @@ func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
var tmpPath string = GetAbsolutePath(TMP_PATH) + "/export_" + QuickString(10)
|
||||
var cmd *exec.Cmd
|
||||
var tmpPath string = GetAbsolutePath(TMP_PATH) + "/export_" + QuickString(10)
|
||||
var cmd *exec.Cmd
|
||||
var emacsPath string
|
||||
var outPath string
|
||||
var outPath string
|
||||
reqMimeType := GetMimeType(path)
|
||||
if reqMimeType == "text/org" {
|
||||
if emacsPath, err = exec.LookPath("emacs"); err != nil {
|
||||
|
|
@ -60,7 +59,7 @@ func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
// initialise the default emacs.el
|
||||
if f, err := os.OpenFile(GetAbsolutePath(CONFIG_PATH + "emacs.el"), os.O_WRONLY | os.O_CREATE | os.O_EXCL, os.ModePerm); err == nil {
|
||||
if f, err := os.OpenFile(GetAbsolutePath(CONFIG_PATH+"emacs.el"), os.O_WRONLY|os.O_CREATE|os.O_EXCL, os.ModePerm); err == nil {
|
||||
if _, err = f.Write([]byte(EmacsElConfig)); err != nil {
|
||||
SendErrorResult(res, ErrFilesystemError)
|
||||
return
|
||||
|
|
@ -75,57 +74,57 @@ func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--eval", "(setq org-html-extension \"org\")",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-html-export-to-html",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-html-export-to-html",
|
||||
)
|
||||
outPath = "index.org.org"
|
||||
} else if mimeType == "application/pdf" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-latex-export-to-pdf",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-latex-export-to-pdf",
|
||||
)
|
||||
if query.Get("mode") == "beamer" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-beamer-export-to-pdf",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-beamer-export-to-pdf",
|
||||
)
|
||||
}
|
||||
outPath = "index.pdf"
|
||||
} else if mimeType == "text/calendar" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-icalendar-export-to-ics",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-icalendar-export-to-ics",
|
||||
)
|
||||
outPath = "index.ics"
|
||||
} else if mimeType == "text/plain" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-ascii-export-to-ascii",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-ascii-export-to-ascii",
|
||||
)
|
||||
outPath = "index.txt"
|
||||
} else if mimeType == "text/x-latex" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-latex-export-to-latex",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-latex-export-to-latex",
|
||||
)
|
||||
outPath = "index.tex"
|
||||
} else if mimeType == "text/markdown" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-md-export-to-markdown",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-md-export-to-markdown",
|
||||
)
|
||||
outPath = "index.md"
|
||||
} else if mimeType == "application/vnd.oasis.opendocument.text" {
|
||||
cmd = exec.Command(
|
||||
emacsPath, "--no-init-file", "--batch",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH + "emacs.el"),
|
||||
tmpPath + "/index.org", "-f", "org-odt-export-to-odt",
|
||||
"--load", GetAbsolutePath(CONFIG_PATH+"emacs.el"),
|
||||
tmpPath+"/index.org", "-f", "org-odt-export-to-odt",
|
||||
)
|
||||
outPath = "index.odt"
|
||||
} else if mimeType == "text/org" {
|
||||
|
|
@ -137,7 +136,7 @@ func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
|
||||
os.MkdirAll(tmpPath, os.ModePerm)
|
||||
defer os.RemoveAll(tmpPath)
|
||||
f, err := os.OpenFile(tmpPath + "/index.org", os.O_WRONLY|os.O_CREATE, os.ModePerm)
|
||||
f, err := os.OpenFile(tmpPath+"/index.org", os.O_WRONLY|os.O_CREATE, os.ModePerm)
|
||||
if err != nil {
|
||||
SendErrorResult(res, ErrFilesystemError)
|
||||
return
|
||||
|
|
@ -160,7 +159,7 @@ func FileExport(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
f, err = os.OpenFile(tmpPath + "/"+outPath, os.O_RDONLY, os.ModePerm)
|
||||
f, err = os.OpenFile(tmpPath+"/"+outPath, os.O_RDONLY, os.ModePerm)
|
||||
if err != nil {
|
||||
SendErrorResult(res, ErrFilesystemError)
|
||||
return
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ type FileInfo struct {
|
|||
}
|
||||
|
||||
var (
|
||||
FileCache AppCache
|
||||
FileCache AppCache
|
||||
ZipTimeout int
|
||||
)
|
||||
|
||||
|
|
@ -35,7 +35,7 @@ func init() {
|
|||
FileCache.OnEvict(func(key string, value interface{}) {
|
||||
os.RemoveAll(filepath.Join(cachePath, key))
|
||||
})
|
||||
ZipTimeout = Config.Get("features.protection.zip_timeout").Schema(func(f *FormElement) *FormElement{
|
||||
ZipTimeout = Config.Get("features.protection.zip_timeout").Schema(func(f *FormElement) *FormElement {
|
||||
if f == nil {
|
||||
f = &FormElement{}
|
||||
}
|
||||
|
|
@ -44,7 +44,7 @@ func init() {
|
|||
f.Type = "number"
|
||||
f.Description = "Timeout when user wants to download archive as a zip"
|
||||
f.Placeholder = "Default: 60seconds"
|
||||
return f
|
||||
return f
|
||||
}).Int()
|
||||
}
|
||||
|
||||
|
|
@ -73,7 +73,7 @@ func FileLs(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
files := make([]FileInfo, len(entries))
|
||||
etagger := fnv.New32()
|
||||
etagger.Write([]byte(path + strconv.Itoa(len(entries))))
|
||||
for i:=0; i<len(entries); i++ {
|
||||
for i := 0; i < len(entries); i++ {
|
||||
name := entries[i].Name()
|
||||
modTime := entries[i].ModTime().UnixNano() / int64(time.Millisecond)
|
||||
|
||||
|
|
@ -151,7 +151,7 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
if req.Header.Get("range") != "" {
|
||||
ctx.Session["_path"] = path
|
||||
if p := FileCache.Get(ctx.Session); p != nil {
|
||||
f, err := os.OpenFile(p.(string), os.O_RDONLY, os.ModePerm);
|
||||
f, err := os.OpenFile(p.(string), os.O_RDONLY, os.ModePerm)
|
||||
if err == nil {
|
||||
file = f
|
||||
if fi, err := f.Stat(); err == nil {
|
||||
|
|
@ -171,7 +171,7 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
if req.Header.Get("range") != "" {
|
||||
needToCreateCache = true
|
||||
}
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path) + "/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path)+"/")
|
||||
}
|
||||
|
||||
// plugin hooks
|
||||
|
|
@ -194,8 +194,8 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
}
|
||||
} else {
|
||||
tmpPath := filepath.Join(GetCurrentDir(), TMP_PATH, "file_" + QuickString(20) + ".dat")
|
||||
f, err := os.OpenFile(tmpPath, os.O_RDWR|os.O_CREATE, os.ModePerm);
|
||||
tmpPath := filepath.Join(GetCurrentDir(), TMP_PATH, "file_"+QuickString(20)+".dat")
|
||||
f, err := os.OpenFile(tmpPath, os.O_RDWR|os.O_CREATE, os.ModePerm)
|
||||
if err != nil {
|
||||
SendErrorResult(res, err)
|
||||
return
|
||||
|
|
@ -241,7 +241,7 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
}
|
||||
|
||||
if start != -1 && end != -1 && end - start >= 0 {
|
||||
if start != -1 && end != -1 && end-start >= 0 {
|
||||
ranges = append(ranges, []int64{start, end})
|
||||
}
|
||||
}
|
||||
|
|
@ -261,9 +261,9 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
if f, ok := file.(io.ReadSeeker); ok && len(ranges) > 0 {
|
||||
if _, err = f.Seek(ranges[0][0], io.SeekStart); err == nil {
|
||||
header.Set("Content-Range", fmt.Sprintf("bytes %d-%d/%d", ranges[0][0], ranges[0][1], contentLength))
|
||||
header.Set("Content-Length", fmt.Sprintf("%d", ranges[0][1] - ranges[0][0] + 1))
|
||||
header.Set("Content-Length", fmt.Sprintf("%d", ranges[0][1]-ranges[0][0]+1))
|
||||
res.WriteHeader(http.StatusPartialContent)
|
||||
io.CopyN(res, f, ranges[0][1] - ranges[0][0] + 1)
|
||||
io.CopyN(res, f, ranges[0][1]-ranges[0][0]+1)
|
||||
} else {
|
||||
res.WriteHeader(http.StatusRequestedRangeNotSatisfiable)
|
||||
}
|
||||
|
|
@ -276,13 +276,13 @@ func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
|
||||
func FileAccess(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
allowed := []string{}
|
||||
if model.CanRead(&ctx){
|
||||
if model.CanRead(&ctx) {
|
||||
allowed = append(allowed, "GET")
|
||||
}
|
||||
if model.CanEdit(&ctx){
|
||||
if model.CanEdit(&ctx) {
|
||||
allowed = append(allowed, "PUT")
|
||||
}
|
||||
if model.CanUpload(&ctx){
|
||||
if model.CanUpload(&ctx) {
|
||||
allowed = append(allowed, "POST")
|
||||
}
|
||||
header := res.Header()
|
||||
|
|
@ -338,11 +338,11 @@ func FileSave(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
SendErrorResult(res, NewError(err.Error(), 403))
|
||||
return
|
||||
}
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path) + "/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path)+"/")
|
||||
go model.SProc.HintFile(&ctx, path)
|
||||
if remErr := req.MultipartForm.RemoveAll(); remErr != nil {
|
||||
Log.Error("couldn't remove multipartform data: %s", err.Error())
|
||||
}
|
||||
}
|
||||
SendSuccessResult(res, nil)
|
||||
}
|
||||
|
||||
|
|
@ -373,8 +373,8 @@ func FileMv(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
go model.SProc.HintRm(&ctx, filepath.Dir(from) + "/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(to) + "/")
|
||||
go model.SProc.HintRm(&ctx, filepath.Dir(from)+"/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(to)+"/")
|
||||
SendSuccessResult(res, nil)
|
||||
}
|
||||
|
||||
|
|
@ -415,7 +415,7 @@ func FileMkdir(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
SendErrorResult(res, err)
|
||||
return
|
||||
}
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path) + "/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path)+"/")
|
||||
SendSuccessResult(res, nil)
|
||||
}
|
||||
|
||||
|
|
@ -436,7 +436,7 @@ func FileTouch(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
SendErrorResult(res, err)
|
||||
return
|
||||
}
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path) + "/")
|
||||
go model.SProc.HintLs(&ctx, filepath.Dir(path)+"/")
|
||||
SendSuccessResult(res, nil)
|
||||
}
|
||||
|
||||
|
|
@ -447,7 +447,7 @@ func FileDownloader(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
paths := req.URL.Query()["path"]
|
||||
for i:=0; i<len(paths); i++ {
|
||||
for i := 0; i < len(paths); i++ {
|
||||
if paths[i], err = PathBuilder(ctx, paths[i]); err != nil {
|
||||
SendErrorResult(res, err)
|
||||
return
|
||||
|
|
@ -465,7 +465,7 @@ func FileDownloader(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
start := time.Now()
|
||||
var addToZipRecursive func(App, *zip.Writer, string, string) error
|
||||
addToZipRecursive = func(c App, zw *zip.Writer, backendPath string, zipRoot string) (err error) {
|
||||
if time.Now().Sub(start) > time.Duration(ZipTimeout) * time.Second {
|
||||
if time.Now().Sub(start) > time.Duration(ZipTimeout)*time.Second {
|
||||
return ErrTimeout
|
||||
}
|
||||
if strings.HasSuffix(backendPath, "/") == false {
|
||||
|
|
@ -506,10 +506,10 @@ func FileDownloader(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
|
||||
zipWriter := zip.NewWriter(res)
|
||||
defer zipWriter.Close()
|
||||
for i:=0; i<len(paths); i++ {
|
||||
for i := 0; i < len(paths); i++ {
|
||||
zipRoot := ""
|
||||
if strings.HasSuffix(paths[i], "/") {
|
||||
zipRoot = strings.TrimSuffix(paths[i], filepath.Base(paths[i]) + "/")
|
||||
zipRoot = strings.TrimSuffix(paths[i], filepath.Base(paths[i])+"/")
|
||||
} else {
|
||||
zipRoot = strings.TrimSuffix(paths[i], filepath.Base(paths[i]))
|
||||
}
|
||||
|
|
|
|||
|
|
@ -35,7 +35,7 @@ func HealthHandler(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
defer file.Close()
|
||||
|
||||
// CHECK2: read from the filesystem
|
||||
if _, err := file.Read(make([]byte, 10)); err != nil {
|
||||
if _, err := file.Read(make([]byte, 10)); err != nil {
|
||||
res.WriteHeader(http.StatusInternalServerError)
|
||||
res.Write([]byte(`{"status": "error", "reason": "fs read error"}`))
|
||||
return
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ func FileSearch(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
if ctx.Session["path"] != "" {
|
||||
for i:=0; i<len(searchResults); i++ {
|
||||
for i := 0; i < len(searchResults); i++ {
|
||||
searchResults[i].FPath = "/" + strings.TrimPrefix(
|
||||
searchResults[i].FPath,
|
||||
ctx.Session["path"],
|
||||
|
|
|
|||
|
|
@ -11,12 +11,12 @@ import (
|
|||
)
|
||||
|
||||
type Session struct {
|
||||
Home *string `json:"home,omitempty"`
|
||||
IsAuth bool `json:"is_authenticated"`
|
||||
Home *string `json:"home,omitempty"`
|
||||
IsAuth bool `json:"is_authenticated"`
|
||||
}
|
||||
|
||||
func SessionGet(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
r := Session {
|
||||
r := Session{
|
||||
IsAuth: false,
|
||||
}
|
||||
|
||||
|
|
@ -67,7 +67,7 @@ func SessionAuthenticate(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
s, err := json.Marshal(session);
|
||||
s, err := json.Marshal(session)
|
||||
if err != nil {
|
||||
SendErrorResult(res, NewError(err.Error(), 500))
|
||||
return
|
||||
|
|
|
|||
|
|
@ -25,7 +25,7 @@ func ShareList(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
for i:=0; i<len(listOfSharedLinks); i++ {
|
||||
for i := 0; i < len(listOfSharedLinks); i++ {
|
||||
listOfSharedLinks[i].Path = "/" + strings.TrimPrefix(listOfSharedLinks[i].Path, path)
|
||||
}
|
||||
SendSuccessResults(res, listOfSharedLinks)
|
||||
|
|
@ -38,24 +38,24 @@ func ShareUpsert(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
return
|
||||
}
|
||||
s := Share{
|
||||
Id: share_id,
|
||||
Id: share_id,
|
||||
Auth: func() string {
|
||||
if ctx.Share.Id == "" {
|
||||
a, err := req.Cookie(COOKIE_NAME_AUTH)
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
}
|
||||
return a.Value
|
||||
}
|
||||
return ctx.Share.Auth
|
||||
}(),
|
||||
Backend: func () string {
|
||||
Backend: func() string {
|
||||
if ctx.Share.Id == "" {
|
||||
return GenerateID(&ctx)
|
||||
}
|
||||
return ctx.Share.Backend
|
||||
}(),
|
||||
Path: func () string {
|
||||
Path: func() string {
|
||||
leftPath := "/"
|
||||
rightPath := strings.TrimPrefix(NewStringFromInterface(ctx.Body["path"]), "/")
|
||||
if ctx.Share.Id != "" {
|
||||
|
|
@ -103,13 +103,13 @@ func ShareVerifyProof(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
|
||||
// 1) initialise the current context
|
||||
share_id := mux.Vars(req)["share"]
|
||||
s, err = model.ShareGet(share_id);
|
||||
s, err = model.ShareGet(share_id)
|
||||
if err != nil {
|
||||
SendErrorResult(res, err)
|
||||
return
|
||||
}
|
||||
submittedProof = model.Proof{
|
||||
Key: fmt.Sprint(ctx.Body["type"]),
|
||||
Key: fmt.Sprint(ctx.Body["type"]),
|
||||
Value: fmt.Sprint(ctx.Body["value"]),
|
||||
}
|
||||
verifiedProof = model.ShareProofGetAlreadyVerified(req)
|
||||
|
|
@ -132,7 +132,7 @@ func ShareVerifyProof(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
// 3) process the proof sent by the user
|
||||
submittedProof, err = model.ShareProofVerifier(s, submittedProof);
|
||||
submittedProof, err = model.ShareProofVerifier(s, submittedProof)
|
||||
if err != nil {
|
||||
submittedProof.Error = NewString(err.Error())
|
||||
SendSuccessResult(res, submittedProof)
|
||||
|
|
@ -146,7 +146,7 @@ func ShareVerifyProof(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
if submittedProof.Key != "" {
|
||||
submittedProof.Id = Hash(submittedProof.Key + "::" + submittedProof.Value, 20)
|
||||
submittedProof.Id = Hash(submittedProof.Key+"::"+submittedProof.Value, 20)
|
||||
verifiedProof = append(verifiedProof, submittedProof)
|
||||
}
|
||||
|
||||
|
|
@ -161,8 +161,8 @@ func ShareVerifyProof(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
str, _ := EncryptString(SECRET_KEY_DERIVATE_FOR_PROOF, string(j))
|
||||
return str
|
||||
}(verifiedProof),
|
||||
Path: COOKIE_PATH,
|
||||
MaxAge: 60 * 60 * 24 * 30,
|
||||
Path: COOKIE_PATH,
|
||||
MaxAge: 60 * 60 * 24 * 30,
|
||||
HttpOnly: true,
|
||||
SameSite: http.SameSiteStrictMode,
|
||||
}
|
||||
|
|
@ -174,10 +174,10 @@ func ShareVerifyProof(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
SendSuccessResult(res, struct {
|
||||
Id string `json:"id"`
|
||||
Id string `json:"id"`
|
||||
Path string `json:"path"`
|
||||
}{
|
||||
Id: s.Id,
|
||||
Id: s.Id,
|
||||
Path: s.Path,
|
||||
})
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,15 +1,15 @@
|
|||
package ctrl
|
||||
|
||||
import (
|
||||
. "github.com/mickael-kerjean/filestash/server/common"
|
||||
"fmt"
|
||||
. "github.com/mickael-kerjean/filestash/server/common"
|
||||
"io"
|
||||
"text/template"
|
||||
"net/http"
|
||||
URL "net/url"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
func StaticHandler(_path string) func(App, http.ResponseWriter, *http.Request) {
|
||||
|
|
@ -42,7 +42,7 @@ func IndexHandler(_path string) func(App, http.ResponseWriter, *http.Request) {
|
|||
NotFoundHandler(ctx, res, req)
|
||||
return
|
||||
}
|
||||
ua := req.Header.Get("User-Agent");
|
||||
ua := req.Header.Get("User-Agent")
|
||||
if strings.Contains(ua, "MSIE ") || strings.Contains(ua, "Trident/") || strings.Contains(ua, "Edge/") {
|
||||
// Microsoft is behaving on many occasion differently than Firefox / Chrome.
|
||||
// I have neither the time / motivation for it to work properly
|
||||
|
|
@ -82,8 +82,8 @@ func AboutHandler(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
</style>
|
||||
`))
|
||||
t.Execute(res, struct {
|
||||
App []string
|
||||
}{ []string{
|
||||
App []string
|
||||
}{[]string{
|
||||
"Filestash " + APP_VERSION + "." + BUILD_DATE,
|
||||
BUILD_REF,
|
||||
hashFileContent(filepath.Join(GetCurrentDir(), "/filestash"), 0),
|
||||
|
|
@ -92,8 +92,8 @@ func AboutHandler(ctx App, res http.ResponseWriter, req *http.Request) {
|
|||
}
|
||||
|
||||
func CustomCssHandler(ctx App, res http.ResponseWriter, req *http.Request) {
|
||||
res.Header().Set("Content-Type", "text/css");
|
||||
io.WriteString(res, Config.Get("general.custom_css").String());
|
||||
res.Header().Set("Content-Type", "text/css")
|
||||
io.WriteString(res, Config.Get("general.custom_css").String())
|
||||
}
|
||||
|
||||
func ServeFile(res http.ResponseWriter, req *http.Request, filePath string) {
|
||||
|
|
|
|||
|
|
@@ -41,20 +41,19 @@ func WebdavHandler(ctx App, res http.ResponseWriter, req *http.Request) {
 	}

 	h := &webdav.Handler{
-		Prefix: "/s/" + ctx.Share.Id,
+		Prefix:     "/s/" + ctx.Share.Id,
 		FileSystem: model.NewWebdavFs(ctx.Backend, ctx.Share.Backend, ctx.Share.Path, req),
 		LockSystem: model.NewWebdavLock(),
 	}
 	h.ServeHTTP(res, req)
 }

-
 /*
  * OSX ask for a lot of crap while mounting as a network drive. To avoid wasting resources with such
  * an imbecile and considering we can't even see the source code they are running, the best approach we
  * could go on is: "crap in, crap out" where useless request coming in are identified and answer appropriatly
  */
-func WebdavBlacklist (fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
+func WebdavBlacklist(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
 	return func(ctx App, res http.ResponseWriter, req *http.Request) {
 		base := filepath.Base(req.URL.String())
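Only the first statement of WebdavBlacklist is visible in the hunk above, so the following is a hypothetical, self-contained sketch of the same idea written against plain net/http instead of the project's App type. The rejected names (.DS_Store, .hidden, the ._ AppleDouble prefix) are the usual macOS Finder probes and are an assumption on my part, not the project's actual blacklist.

    // Hypothetical sketch: short-circuit macOS Finder probe requests before
    // they reach a WebDAV handler, in the spirit of the comment above.
    package main

    import (
    	"log"
    	"net/http"
    	"path/filepath"
    	"strings"
    )

    func webdavBlacklist(next http.Handler) http.Handler {
    	return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
    		base := filepath.Base(req.URL.Path)
    		// Finder keeps probing for metadata files on every mount; answering
    		// 404 straight away keeps that chatter off the storage backend.
    		if base == ".DS_Store" || base == ".hidden" || strings.HasPrefix(base, "._") {
    			http.Error(res, "Not Found", http.StatusNotFound)
    			return
    		}
    		next.ServeHTTP(res, req)
    	})
    }

    func main() {
    	dav := http.FileServer(http.Dir(".")) // stand-in for the real webdav.Handler
    	log.Fatal(http.ListenAndServe(":8334", webdavBlacklist(dav)))
    }

Answering these probes with a flat 404 before the WebDAV layer is exactly the "crap in, crap out" trade-off the source comment argues for.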
|
||||
|
||||
|
|
|
|||
|
|
@ -10,14 +10,14 @@ func main() {
|
|||
f, err := os.OpenFile("../../config/emacs.el", os.O_RDONLY, os.ModePerm)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
os.Exit(1)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
j, err := ioutil.ReadAll(f)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -11,18 +11,18 @@ func main() {
|
|||
f, err := os.OpenFile("../../config/mime.json", os.O_RDONLY, os.ModePerm)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
os.Exit(1)
|
||||
os.Exit(1)
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
j, err := ioutil.ReadAll(f)
|
||||
if err != nil {
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
fmt.Fprintf(os.Stderr, "error: %v\n", err)
|
||||
os.Exit(1)
|
||||
}
|
||||
mTypes := make(map[string]string, 0)
|
||||
json.Unmarshal(j, &mTypes)
|
||||
json.Unmarshal(j, &mTypes)
|
||||
|
||||
fmt.Printf("package common\n")
|
||||
fmt.Printf("func init() {\n")
|
||||
|
|
|
|||
server/main.go (112 changes)
|
|
@ -8,7 +8,7 @@ import (
|
|||
. "github.com/mickael-kerjean/filestash/server/middleware"
|
||||
_ "github.com/mickael-kerjean/filestash/server/plugin"
|
||||
"net/http"
|
||||
"net/http/pprof"
|
||||
"net/http/pprof"
|
||||
"os"
|
||||
"runtime"
|
||||
"runtime/debug"
|
||||
|
|
@ -26,77 +26,77 @@ func Init(a *App) {
|
|||
|
||||
// API for Session
|
||||
session := r.PathPrefix("/api/session").Subrouter()
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, SessionStart }
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionGet, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, BodyParser }
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionAuthenticate, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, SessionTry }
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionLogout, middlewares, *a)).Methods("DELETE")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax }
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, SessionStart}
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionGet, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, BodyParser}
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionAuthenticate, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, SessionTry}
|
||||
session.HandleFunc("", NewMiddlewareChain(SessionLogout, middlewares, *a)).Methods("DELETE")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax}
|
||||
session.HandleFunc("/auth/{service}", NewMiddlewareChain(SessionOAuthBackend, middlewares, *a)).Methods("GET")
|
||||
|
||||
// API for admin
|
||||
middlewares = []Middleware{ ApiHeaders, SecureAjax }
|
||||
middlewares = []Middleware{ApiHeaders, SecureAjax}
|
||||
admin := r.PathPrefix("/admin/api").Subrouter()
|
||||
admin.HandleFunc("/session", NewMiddlewareChain(AdminSessionGet, middlewares, *a)).Methods("GET")
|
||||
admin.HandleFunc("/session", NewMiddlewareChain(AdminSessionAuthenticate, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ ApiHeaders, AdminOnly, SecureAjax }
|
||||
admin.HandleFunc("/config", NewMiddlewareChain(PrivateConfigHandler, middlewares, *a)).Methods("GET")
|
||||
admin.HandleFunc("/config", NewMiddlewareChain(PrivateConfigUpdateHandler, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ IndexHeaders, AdminOnly, SecureAjax }
|
||||
admin.HandleFunc("/log", NewMiddlewareChain(FetchLogHandler, middlewares, *a)).Methods("GET")
|
||||
admin.HandleFunc("/session", NewMiddlewareChain(AdminSessionGet, middlewares, *a)).Methods("GET")
|
||||
admin.HandleFunc("/session", NewMiddlewareChain(AdminSessionAuthenticate, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ApiHeaders, AdminOnly, SecureAjax}
|
||||
admin.HandleFunc("/config", NewMiddlewareChain(PrivateConfigHandler, middlewares, *a)).Methods("GET")
|
||||
admin.HandleFunc("/config", NewMiddlewareChain(PrivateConfigUpdateHandler, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{IndexHeaders, AdminOnly, SecureAjax}
|
||||
admin.HandleFunc("/log", NewMiddlewareChain(FetchLogHandler, middlewares, *a)).Methods("GET")
|
||||
|
||||
// API for File management
|
||||
files := r.PathPrefix("/api/files").Subrouter()
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SessionStart, LoggedInOnly }
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileCat, middlewares, *a)).Methods("GET", "HEAD")
|
||||
files.HandleFunc("/zip", NewMiddlewareChain(FileDownloader, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, SessionStart, LoggedInOnly }
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileAccess, middlewares, *a)).Methods("OPTIONS")
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileSave, middlewares, *a)).Methods("POST")
|
||||
files.HandleFunc("/ls", NewMiddlewareChain(FileLs, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/mv", NewMiddlewareChain(FileMv, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/rm", NewMiddlewareChain(FileRm, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/mkdir", NewMiddlewareChain(FileMkdir, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/touch", NewMiddlewareChain(FileTouch, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ ApiHeaders, SessionStart, LoggedInOnly }
|
||||
files.HandleFunc("/search", NewMiddlewareChain(FileSearch, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SessionStart, LoggedInOnly}
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileCat, middlewares, *a)).Methods("GET", "HEAD")
|
||||
files.HandleFunc("/zip", NewMiddlewareChain(FileDownloader, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, SessionStart, LoggedInOnly}
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileAccess, middlewares, *a)).Methods("OPTIONS")
|
||||
files.HandleFunc("/cat", NewMiddlewareChain(FileSave, middlewares, *a)).Methods("POST")
|
||||
files.HandleFunc("/ls", NewMiddlewareChain(FileLs, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/mv", NewMiddlewareChain(FileMv, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/rm", NewMiddlewareChain(FileRm, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/mkdir", NewMiddlewareChain(FileMkdir, middlewares, *a)).Methods("GET")
|
||||
files.HandleFunc("/touch", NewMiddlewareChain(FileTouch, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders, SessionStart, LoggedInOnly}
|
||||
files.HandleFunc("/search", NewMiddlewareChain(FileSearch, middlewares, *a)).Methods("GET")
|
||||
|
||||
// API for exporter
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, RedirectSharedLoginIfNeeded, SessionStart, LoggedInOnly }
|
||||
r.PathPrefix("/api/export/{share}/{mtype0}/{mtype1}").Handler(NewMiddlewareChain(FileExport, middlewares, *a))
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, RedirectSharedLoginIfNeeded, SessionStart, LoggedInOnly}
|
||||
r.PathPrefix("/api/export/{share}/{mtype0}/{mtype1}").Handler(NewMiddlewareChain(FileExport, middlewares, *a))
|
||||
|
||||
// API for Shared link
|
||||
share := r.PathPrefix("/api/share").Subrouter()
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, SessionStart, LoggedInOnly }
|
||||
share.HandleFunc("", NewMiddlewareChain(ShareList, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, BodyParser }
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, SessionStart, LoggedInOnly}
|
||||
share.HandleFunc("", NewMiddlewareChain(ShareList, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, BodyParser}
|
||||
share.HandleFunc("/{share}/proof", NewMiddlewareChain(ShareVerifyProof, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, CanManageShare }
|
||||
share.HandleFunc("/{share}", NewMiddlewareChain(ShareDelete, middlewares, *a)).Methods("DELETE")
|
||||
middlewares = []Middleware{ ApiHeaders, SecureHeaders, SecureAjax, BodyParser, CanManageShare }
|
||||
share.HandleFunc("/{share}", NewMiddlewareChain(ShareUpsert, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, CanManageShare}
|
||||
share.HandleFunc("/{share}", NewMiddlewareChain(ShareDelete, middlewares, *a)).Methods("DELETE")
|
||||
middlewares = []Middleware{ApiHeaders, SecureHeaders, SecureAjax, BodyParser, CanManageShare}
|
||||
share.HandleFunc("/{share}", NewMiddlewareChain(ShareUpsert, middlewares, *a)).Methods("POST")
|
||||
|
||||
// Webdav server / Shared Link
|
||||
middlewares = []Middleware{ IndexHeaders, SecureHeaders }
|
||||
r.HandleFunc("/s/{share}", NewMiddlewareChain(IndexHandler(FILE_INDEX), middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ WebdavBlacklist, SessionStart }
|
||||
r.PathPrefix("/s/{share}").Handler(NewMiddlewareChain(WebdavHandler, middlewares, *a))
|
||||
middlewares = []Middleware{IndexHeaders, SecureHeaders}
|
||||
r.HandleFunc("/s/{share}", NewMiddlewareChain(IndexHandler(FILE_INDEX), middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{WebdavBlacklist, SessionStart}
|
||||
r.PathPrefix("/s/{share}").Handler(NewMiddlewareChain(WebdavHandler, middlewares, *a))
|
||||
|
||||
// Application Resources
|
||||
middlewares = []Middleware{ ApiHeaders }
|
||||
r.HandleFunc("/api/config", NewMiddlewareChain(PublicConfigHandler, middlewares, *a)).Methods("GET")
|
||||
r.HandleFunc("/api/backend", NewMiddlewareChain(AdminBackend, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ StaticHeaders }
|
||||
r.PathPrefix("/assets").Handler(http.HandlerFunc(NewMiddlewareChain(StaticHandler(FILE_ASSETS), middlewares, *a))).Methods("GET")
|
||||
r.HandleFunc("/favicon.ico", NewMiddlewareChain(StaticHandler(FILE_ASSETS + "/assets/logo/"), middlewares, *a)).Methods("GET")
|
||||
r.HandleFunc("/sw_cache.js", NewMiddlewareChain(StaticHandler(FILE_ASSETS + "/assets/worker/"), middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders}
|
||||
r.HandleFunc("/api/config", NewMiddlewareChain(PublicConfigHandler, middlewares, *a)).Methods("GET")
|
||||
r.HandleFunc("/api/backend", NewMiddlewareChain(AdminBackend, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{StaticHeaders}
|
||||
r.PathPrefix("/assets").Handler(http.HandlerFunc(NewMiddlewareChain(StaticHandler(FILE_ASSETS), middlewares, *a))).Methods("GET")
|
||||
r.HandleFunc("/favicon.ico", NewMiddlewareChain(StaticHandler(FILE_ASSETS+"/assets/logo/"), middlewares, *a)).Methods("GET")
|
||||
r.HandleFunc("/sw_cache.js", NewMiddlewareChain(StaticHandler(FILE_ASSETS+"/assets/worker/"), middlewares, *a)).Methods("GET")
|
||||
|
||||
// Other endpoints
|
||||
middlewares = []Middleware{ ApiHeaders }
|
||||
r.HandleFunc("/report", NewMiddlewareChain(ReportHandler, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{ IndexHeaders }
|
||||
r.HandleFunc("/about", NewMiddlewareChain(AboutHandler, middlewares, *a)).Methods("GET")
|
||||
middlewares = []Middleware{ApiHeaders}
|
||||
r.HandleFunc("/report", NewMiddlewareChain(ReportHandler, middlewares, *a)).Methods("POST")
|
||||
middlewares = []Middleware{IndexHeaders}
|
||||
r.HandleFunc("/about", NewMiddlewareChain(AboutHandler, middlewares, *a)).Methods("GET")
|
||||
r.HandleFunc("/robots.txt", func(res http.ResponseWriter, req *http.Request) {
|
||||
res.Write([]byte(""))
|
||||
})
|
||||
|
|
@ -110,7 +110,7 @@ func Init(a *App) {
|
|||
initPluginsRoutes(r, a)
|
||||
|
||||
r.PathPrefix("/admin").Handler(http.HandlerFunc(NewMiddlewareChain(IndexHandler(FILE_INDEX), middlewares, *a))).Methods("GET")
|
||||
r.PathPrefix("/" ).Handler(http.HandlerFunc(NewMiddlewareChain(IndexHandler(FILE_INDEX), middlewares, *a))).Methods("GET")
|
||||
r.PathPrefix("/").Handler(http.HandlerFunc(NewMiddlewareChain(IndexHandler(FILE_INDEX), middlewares, *a))).Methods("GET")
|
||||
|
||||
// Routes are served via plugins to avoid getting stuck with plain HTTP. The idea is to
|
||||
// support many more protocols in the future: HTTPS, HTTP2, TOR or whatever that sounds
|
||||
|
|
@ -123,7 +123,7 @@ func Init(a *App) {
|
|||
Log.Warning("No starter plugin available")
|
||||
return
|
||||
}
|
||||
select { }
|
||||
select {}
|
||||
}
|
||||
|
||||
func initDebugRoutes(r *mux.Router) {
|
||||
|
|
@ -174,7 +174,7 @@ func initPluginsRoutes(r *mux.Router, a *App) {
|
|||
res.Header().Set("Content-Type", GetMimeType(req.URL.String()))
|
||||
res.Write([]byte(`window.overrides["xdg-open"] = function(mime){`))
|
||||
openers := Hooks.Get.XDGOpen()
|
||||
for i:=0; i<len(openers); i++ {
|
||||
for i := 0; i < len(openers); i++ {
|
||||
res.Write([]byte(openers[i]))
|
||||
}
|
||||
res.Write([]byte(`return null;}`))
|
||||
|
|
|
|||
|
|
@ -7,7 +7,7 @@ import (
"net/http"
)

func BodyParser (fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
func BodyParser(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
extractBody := func(req *http.Request) (map[string]interface{}, error) {
var body map[string]interface{}
byt, err := ioutil.ReadAll(req.Body)

@ -59,7 +59,7 @@ func IndexHeaders(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App
cspHeader += "worker-src 'self' blob:; "
cspHeader += "form-action 'self'; base-uri 'self'; "
cspHeader += "frame-src 'self'; "
if allowedDomainsForIframe := Config.Get("features.protection.iframe").Schema(func(f *FormElement) *FormElement{
if allowedDomainsForIframe := Config.Get("features.protection.iframe").Schema(func(f *FormElement) *FormElement {
if f == nil {
f = &FormElement{}
}

@ -3,17 +3,17 @@ package middleware
import (
"bytes"
"encoding/json"
"net/http"
. "github.com/mickael-kerjean/filestash/server/common"
"time"
"net/http"
"sync"
"time"
)

type Middleware func(func(App, http.ResponseWriter, *http.Request)) func(App, http.ResponseWriter, *http.Request)

func NewMiddlewareChain(fn func(App, http.ResponseWriter, *http.Request), m []Middleware, app App) http.HandlerFunc {
return func(res http.ResponseWriter, req *http.Request) {
var resw ResponseWriter = NewResponseWriter(res)
var resw ResponseWriter = NewResponseWriter(res)
var f func(App, http.ResponseWriter, *http.Request) = fn

for i := len(m) - 1; i >= 0; i-- {
@ -36,7 +36,7 @@ type ResponseWriter struct {
func NewResponseWriter(res http.ResponseWriter) ResponseWriter {
return ResponseWriter{
ResponseWriter: res,
start: time.Now(),
start: time.Now(),
}
}

@ -53,18 +53,18 @@ func (w *ResponseWriter) Write(b []byte) (int, error) {
}

type LogEntry struct {
Host string `json:"host"`
Method string `json:"method"`
RequestURI string `json:"pathname"`
Proto string `json:"proto"`
Status int `json:"status"`
Scheme string `json:"scheme"`
UserAgent string `json:"userAgent"`
Ip string `json:"ip"`
Referer string `json:"referer"`
Duration float64 `json:"responseTime"`
Version string `json:"version"`
Backend string `json:"backend"`
Host string `json:"host"`
Method string `json:"method"`
RequestURI string `json:"pathname"`
Proto string `json:"proto"`
Status int `json:"status"`
Scheme string `json:"scheme"`
UserAgent string `json:"userAgent"`
Ip string `json:"ip"`
Referer string `json:"referer"`
Duration float64 `json:"responseTime"`
Version string `json:"version"`
Backend string `json:"backend"`
}

func Logger(ctx App, res http.ResponseWriter, req *http.Request) {

@ -130,10 +130,10 @@ func (this *Telemetry) Flush() {
resp.Body.Close()
}

var telemetry Telemetry = Telemetry{ Data: make([]LogEntry, 0) }
var telemetry Telemetry = Telemetry{Data: make([]LogEntry, 0)}

func init(){
go func(){
func init() {
go func() {
for {
time.Sleep(10 * time.Second)
telemetry.Flush()
@ -5,9 +5,9 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"github.com/gorilla/mux"
. "github.com/mickael-kerjean/filestash/server/common"
"github.com/mickael-kerjean/filestash/server/model"
"github.com/gorilla/mux"
"net/http"
"regexp"
"strings"

@ -26,13 +26,13 @@ func LoggedInOnly(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App
func AdminOnly(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
return func(ctx App, res http.ResponseWriter, req *http.Request) {
if admin := Config.Get("auth.admin").String(); admin != "" {
c, err := req.Cookie(COOKIE_NAME_ADMIN);
c, err := req.Cookie(COOKIE_NAME_ADMIN)
if err != nil {
SendErrorResult(res, ErrPermissionDenied)
return
}

str, err := DecryptString(SECRET_KEY_DERIVATE_FOR_ADMIN, c.Value);
str, err := DecryptString(SECRET_KEY_DERIVATE_FOR_ADMIN, c.Value)
if err != nil {
SendErrorResult(res, ErrPermissionDenied)
return

@ -49,7 +49,7 @@ func AdminOnly(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, r
}
}

func SessionStart (fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
func SessionStart(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
return func(ctx App, res http.ResponseWriter, req *http.Request) {
var err error
if ctx.Share, err = _extractShare(req); err != nil {

@ -72,7 +72,7 @@ func SessionStart (fn func(App, http.ResponseWriter, *http.Request)) func(ctx Ap
}
}

func SessionTry (fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
func SessionTry(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
return func(ctx App, res http.ResponseWriter, req *http.Request) {
ctx.Share, _ = _extractShare(req)
ctx.Session, _ = _extractSession(req, &ctx)

@ -93,7 +93,7 @@ func RedirectSharedLoginIfNeeded(fn func(App, http.ResponseWriter, *http.Request
return
}

share, err := _extractShare(req);
share, err := _extractShare(req)
if err != nil || share_id != share.Id {
http.Redirect(res, req, fmt.Sprintf("/s/%s?next=%s", share_id, req.URL.Path), http.StatusTemporaryRedirect)
return

@ -187,7 +187,7 @@ func _extractShare(req *http.Request) (Share, error) {
}

var verifiedProof []model.Proof = model.ShareProofGetAlreadyVerified(req)
username, password := func(authHeader string) (string, string){
username, password := func(authHeader string) (string, string) {
decoded, err := base64.StdEncoding.DecodeString(
strings.TrimPrefix(authHeader, "Basic "),
)

@ -203,7 +203,7 @@ func _extractShare(req *http.Request) (Share, error) {
if len(usr) != 3 {
return "", p
}
if Hash(usr[1] + SECRET_KEY_DERIVATE_FOR_HASH, 10) != usr[2] {
if Hash(usr[1]+SECRET_KEY_DERIVATE_FOR_HASH, 10) != usr[2] {
return "", p
}
return usr[1], p

@ -211,12 +211,12 @@ func _extractShare(req *http.Request) (Share, error) {

if s.Users != nil && username != "" {
if v, ok := model.ShareProofVerifierEmail(*s.Users, username); ok {
verifiedProof = append(verifiedProof, model.Proof{ Key: "email", Value: v })
verifiedProof = append(verifiedProof, model.Proof{Key: "email", Value: v})
}
}
if s.Password != nil && password != "" {
if v, ok := model.ShareProofVerifierPassword(*s.Password, password); ok {
verifiedProof = append(verifiedProof, model.Proof{ Key: "password", Value: v })
verifiedProof = append(verifiedProof, model.Proof{Key: "password", Value: v})
}
}
var requiredProof []model.Proof = model.ShareProofGetRequired(s)
@ -26,7 +26,6 @@ func init() {
Backend.Register("gdrive", GDrive{})
}


func (g GDrive) Init(params map[string]string, app *App) (IBackend, error) {
backend := GDrive{}

@ -71,21 +70,21 @@ func (g GDrive) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: "gdrive",
Name: "type",
Type: "hidden",
Value: "gdrive",
},
FormElement{
ReadOnly: true,
Name: "oauth2",
Type: "text",
Value: "/api/session/auth/gdrive",
ReadOnly: true,
Name: "oauth2",
Type: "text",
Value: "/api/session/auth/gdrive",
},
FormElement{
ReadOnly: true,
Name: "image",
Type: "image",
Value: "data:image/svg+xml;base64,PHN2ZyB2aWV3Qm94PSIwIDAgMTM5IDEyMC40IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj4KICA8cGF0aCBkPSJtMjQuMiAxMjAuNC0yNC4yLTQxLjkgNDUuMy03OC41IDI0LjIgNDEuOXoiIGZpbGw9IiMwZGE5NjAiLz4KICA8cGF0aCBkPSJtNTguOSA2MC4yIDEwLjYtMTguMy0yNC4yLTQxLjl6IiBmaWxsPSIjMGRhOTYwIi8+CiAgPHBhdGggZD0ibTI0LjIgMTIwLjQgMjQuMi00MS45aDkwLjZsLTI0LjIgNDEuOXoiIGZpbGw9IiMyZDZmZGQiLz4KICA8cGF0aCBkPSJtNjkuNSA3OC41aC0yMS4xbDEwLjUtMTguMy0zNC43IDYwLjJ6IiBmaWxsPSIjMmQ2ZmRkIi8+ICAKICA8cGF0aCBkPSJtMTM5IDc4LjVoLTQ4LjRsLTQ1LjMtNzguNWg0OC40eiIgZmlsbD0iI2ZmZDI0ZCIvPgogIDxwYXRoIGQ9Im05MC42IDc4LjVoNDguNGwtNTguOS0xOC4zeiIgZmlsbD0iI2ZmZDI0ZCIvPgo8L3N2Zz4K",
ReadOnly: true,
Name: "image",
Type: "image",
Value: "data:image/svg+xml;base64,PHN2ZyB2aWV3Qm94PSIwIDAgMTM5IDEyMC40IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj4KICA8cGF0aCBkPSJtMjQuMiAxMjAuNC0yNC4yLTQxLjkgNDUuMy03OC41IDI0LjIgNDEuOXoiIGZpbGw9IiMwZGE5NjAiLz4KICA8cGF0aCBkPSJtNTguOSA2MC4yIDEwLjYtMTguMy0yNC4yLTQxLjl6IiBmaWxsPSIjMGRhOTYwIi8+CiAgPHBhdGggZD0ibTI0LjIgMTIwLjQgMjQuMi00MS45aDkwLjZsLTI0LjIgNDEuOXoiIGZpbGw9IiMyZDZmZGQiLz4KICA8cGF0aCBkPSJtNjkuNSA3OC41aC0yMS4xbDEwLjUtMTguMy0zNC43IDYwLjJ6IiBmaWxsPSIjMmQ2ZmRkIi8+ICAKICA8cGF0aCBkPSJtMTM5IDc4LjVoLTQ4LjRsLTQ1LjMtNzguNWg0OC40eiIgZmlsbD0iI2ZmZDI0ZCIvPgogIDxwYXRoIGQ9Im05MC42IDc4LjVoNDguNGwtNTguOS0xOC4zeiIgZmlsbD0iI2ZmZDI0ZCIvPgo8L3N2Zz4K",
},
},
}
@ -96,7 +96,7 @@ func (git Git) Init(params map[string]string, app *App) (IBackend, error) {
return nil, NewError("Your password doesn't fit in a cookie :/", 500)
}

hash := GenerateID(app)
hash := GenerateID(app)
p.basePath = GetAbsolutePath(GitCachePath + "repo_" + hash + "/")

repo, err := g.git.open(p, p.basePath)

@ -112,9 +112,9 @@ func (g Git) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Value: "git",
Type: "hidden",
Name: "type",
Value: "git",
Type: "hidden",
},
FormElement{
Name: "repo",

@ -135,7 +135,7 @@ func (g Git) LoginForm() Form {
Name: "advanced",
Type: "enable",
Placeholder: "Advanced",
Target: []string{
Target: []string{
"git_path", "git_passphrase", "git_commit",
"git_branch", "git_author_email", "git_author_name",
"git_committer_email", "git_committer_name",

@ -152,7 +152,6 @@ func (g Git) LoginForm() Form {
Name: "passphrase",
Type: "text",
Placeholder: "Passphrase",

},
FormElement{
Id: "git_commit",

@ -194,7 +193,6 @@ func (g Git) LoginForm() Form {
}
}


func (g Git) Ls(path string) ([]os.FileInfo, error) {
g.git.refresh()
p, err := g.path(path)
@ -110,8 +110,8 @@ func (s Sftp) Init(params map[string]string, app *App) (IBackend, error) {
}

config := &ssh.ClientConfig{
User: p.username,
Auth: auth,
User: p.username,
Auth: auth,
HostKeyCallback: func(hostname string, remote net.Addr, key ssh.PublicKey) error {
if params["hostkey"] == "" {
return nil

@ -143,9 +143,9 @@ func (b Sftp) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: "sftp",
Name: "type",
Type: "hidden",
Value: "sftp",
},
FormElement{
Name: "hostname",

@ -179,7 +179,6 @@ func (b Sftp) LoginForm() Form {
Name: "port",
Type: "number",
Placeholder: "Port",

},
FormElement{
Id: "sftp_passphrase",
@ -12,7 +12,7 @@ func NewBackend(ctx *App, conn map[string]string) (IBackend, error) {
// by default, a hacker could use filestash to establish connections outside of what's
// define in the config file. We need to prevent this
possibilities := make([]map[string]interface{}, 0)
for i:=0; i< len(Config.Conn); i++ {
for i := 0; i < len(Config.Conn); i++ {
d := Config.Conn[i]
if d["type"] != conn["type"] {
continue

@ -71,12 +71,12 @@ func GetHome(b IBackend, base string) (string, error) {
}

func MapStringInterfaceToMapStringString(m map[string]interface{}) map[string]string {
res := make(map[string]string)
for key, value := range m {
res := make(map[string]string)
for key, value := range m {
res[key] = fmt.Sprintf("%v", value)
if res[key] == "<nil>" {
res[key] = ""
}
}
}
return res
}
@ -16,7 +16,7 @@ import (
func OfficeFormater(r io.ReadCloser) (io.ReadCloser, error) {
tmpName := fmt.Sprintf("/tmp/docx_%d.docx", rand.Intn(1000000))
defer os.Remove(tmpName)
f, err := os.OpenFile(tmpName, os.O_CREATE | os.O_WRONLY, os.ModePerm)
f, err := os.OpenFile(tmpName, os.O_CREATE|os.O_WRONLY, os.ModePerm)
if err != nil {
return nil, err
}

@ -25,17 +25,21 @@ func OfficeFormater(r io.ReadCloser) (io.ReadCloser, error) {
return nil, err
}
z, err := zip.OpenReader(tmpName)
if err != nil {
if err != nil {
return nil, err
}
defer z.Close()
}
defer z.Close()

hasData := false
content := bytes.NewBuffer([]byte{})
for _, f := range z.File {
shouldExtract := false
if f.Name == "word/document.xml" { shouldExtract = true }
if strings.HasPrefix(f.Name, "ppt/slides/slide") { shouldExtract = true }
if f.Name == "word/document.xml" {
shouldExtract = true
}
if strings.HasPrefix(f.Name, "ppt/slides/slide") {
shouldExtract = true
}

if shouldExtract == false {
continue

@ -76,7 +80,6 @@ func OfficeFormater(r io.ReadCloser) (io.ReadCloser, error) {
return NewReadCloserFromReader(content), nil
}


type WordDoc struct {
Text []byte `xml:",innerxml"`
}

@ -13,7 +13,7 @@ import (
func PdfFormater(r io.ReadCloser) (io.ReadCloser, error) {
tmpName := fmt.Sprintf("/tmp/pdf_%d.docx", rand.Intn(1000000))
defer os.Remove(tmpName)
f, err := os.OpenFile(tmpName, os.O_CREATE | os.O_WRONLY, os.ModePerm)
f, err := os.OpenFile(tmpName, os.O_CREATE|os.O_WRONLY, os.ModePerm)
if err != nil {
return nil, err
}
@ -4,8 +4,8 @@ import (
"database/sql"
_ "github.com/mattn/go-sqlite3"
. "github.com/mickael-kerjean/filestash/server/common"
"path/filepath"
"os"
"path/filepath"
"time"
)

@ -34,7 +34,7 @@ func init() {
}
}

go func(){
go func() {
autovacuum()
}()
}

@ -6,7 +6,7 @@ import (

func CanRead(ctx *App) bool {
if ctx.Share.Id != "" {
return ctx.Share.CanRead
return ctx.Share.CanRead
}
return true
}
@ -20,20 +20,21 @@ import (
)

const (
PHASE_EXPLORE = "PHASE_EXPLORE"
PHASE_INDEXING = "PHASE_INDEXING"
PHASE_MAINTAIN = "PHASE_MAINTAIN"
PHASE_PAUSE = "PHASE_PAUSE"
MAX_HEAP_SIZE = 100000
PHASE_EXPLORE = "PHASE_EXPLORE"
PHASE_INDEXING = "PHASE_INDEXING"
PHASE_MAINTAIN = "PHASE_MAINTAIN"
PHASE_PAUSE = "PHASE_PAUSE"
MAX_HEAP_SIZE = 100000
)

var (
SEARCH_ENABLE func() bool
SEARCH_TIMEOUT func() time.Duration
SEARCH_ENABLE func() bool
SEARCH_TIMEOUT func() time.Duration
SEARCH_PROCESS_MAX func() int
SEARCH_PROCESS_PAR func() int
SEARCH_REINDEX func() int
CYCLE_TIME func() int
INDEXING_EXT func() string
SEARCH_REINDEX func() int
CYCLE_TIME func() int
INDEXING_EXT func() string
MAX_INDEXING_FSIZE func() int
INDEXING_EXCLUSION = []string{"/node_modules/", "/bower_components/", "/.cache/", "/.npm/", "/.git/"}
)

@ -43,7 +44,7 @@ var SProc SearchProcess = SearchProcess{
n: -1,
}

func init(){
func init() {
SEARCH_ENABLE = func() bool {
return Config.Get("features.search.enable").Schema(func(f *FormElement) *FormElement {
if f == nil {

@ -171,7 +172,8 @@ func init(){
for {
if SEARCH_ENABLE() == false {
select {
case <- onChange.Listener: startSearch = SEARCH_ENABLE()
case <-onChange.Listener:
startSearch = SEARCH_ENABLE()
}
if startSearch == false {
continue

@ -187,7 +189,7 @@ func init(){
sidx.mu.Unlock()
}
}
for i:=0; i<SEARCH_PROCESS_PAR(); i++ {
for i := 0; i < SEARCH_PROCESS_PAR(); i++ {
go runner()
}
}

@ -206,12 +208,12 @@ func SearchStateful(app *App, path string, keyword string) []File {
}

rows, err := s.DB.Query(
"SELECT type, path, size, modTime FROM file WHERE path IN (" +
" SELECT path FROM file_index WHERE file_index MATCH ? AND path > ? AND path < ?" +
" ORDER BY rank LIMIT 2000" +
")",
"SELECT type, path, size, modTime FROM file WHERE path IN ("+
" SELECT path FROM file_index WHERE file_index MATCH ? AND path > ? AND path < ?"+
" ORDER BY rank LIMIT 2000"+
")",
regexp.MustCompile(`(\.|\-)`).ReplaceAllString(keyword, "\"$1\""),
path, path + "~",
path, path+"~",
)
if err != nil {
return files
@ -239,15 +241,15 @@ type SearchProcess struct {
mu sync.RWMutex
}

func(this *SearchProcess) HintLs(app *App, path string) *SearchIndexer {
func (this *SearchProcess) HintLs(app *App, path string) *SearchIndexer {
id := GenerateID(app)

// try to find the search indexer among the existing ones
this.mu.RLock()
for i:=len(this.idx)-1; i>=0; i-- {
for i := len(this.idx) - 1; i >= 0; i-- {
if id == this.idx[i].Id {
alreadyHasPath := false
for j:=0; j<len(this.idx[i].FoldersUnknown); j++ {
for j := 0; j < len(this.idx[i].FoldersUnknown); j++ {
if this.idx[i].FoldersUnknown[j].Path == path {
alreadyHasPath = true
break

@ -255,10 +257,10 @@ func(this *SearchProcess) HintLs(app *App, path string) *SearchIndexer {
}
if alreadyHasPath == false {
heap.Push(&this.idx[i].FoldersUnknown, &Document{
Type: "directory",
Path: path,
Type: "directory",
Path: path,
InitialPath: path,
Name: filepath.Base(path),
Name: filepath.Base(path),
})
}
ret := &this.idx[i]

@ -268,47 +270,46 @@ func(this *SearchProcess) HintLs(app *App, path string) *SearchIndexer {
}
this.mu.RUnlock()


// Having all indexers running in memory could be expensive => instead we're cycling a pool
search_process_max := SEARCH_PROCESS_MAX()
this.mu.Lock()
lenIdx := len(this.idx)
if lenIdx > 0 && search_process_max > 0 && lenIdx > ( search_process_max - 1) {
toDel := this.idx[0 : lenIdx - ( search_process_max - 1)]
if lenIdx > 0 && search_process_max > 0 && lenIdx > (search_process_max-1) {
toDel := this.idx[0 : lenIdx-(search_process_max-1)]
for i := range toDel {
toDel[i].DB.Close()
}
this.idx = this.idx[lenIdx - ( search_process_max - 1) :]
this.idx = this.idx[lenIdx-(search_process_max-1):]
}
// instantiate the new indexer
s := NewSearchIndexer(id, app.Backend)
heap.Push(&s.FoldersUnknown, &Document{
Type: "directory",
Path: path,
Type: "directory",
Path: path,
InitialPath: path,
Name: filepath.Base(path),
Name: filepath.Base(path),
})
this.idx = append(this.idx, s)
this.mu.Unlock()
return &s
}

func(this *SearchProcess) HintRm(app *App, path string) {
func (this *SearchProcess) HintRm(app *App, path string) {
id := GenerateID(app)
this.mu.RLock()
for i:=len(this.idx)-1; i>=0; i-- {
for i := len(this.idx) - 1; i >= 0; i-- {
if id == this.idx[i].Id {
this.idx[i].DB.Exec("DELETE FROM file WHERE path >= ? AND path < ?", path, path + "~")
this.idx[i].DB.Exec("DELETE FROM file WHERE path >= ? AND path < ?", path, path+"~")
break
}
}
this.mu.RUnlock()
}

func(this *SearchProcess) HintFile(app *App, path string) {
func (this *SearchProcess) HintFile(app *App, path string) {
id := GenerateID(app)
this.mu.RLock()
for i:=len(this.idx)-1; i>=0; i-- {
for i := len(this.idx) - 1; i >= 0; i-- {
if id == this.idx[i].Id {
this.idx[i].DB.Exec("UPDATE file set indexTime = NULL WHERE path = ?", path)
break
@ -317,13 +318,12 @@ func(this *SearchProcess) HintFile(app *App, path string) {
this.mu.RUnlock()
}


func(this *SearchProcess) Peek() *SearchIndexer {
func (this *SearchProcess) Peek() *SearchIndexer {
if len(this.idx) == 0 {
return nil
}
this.mu.Lock()
if this.n >= len(this.idx) - 1 || this.n < 0 {
if this.n >= len(this.idx)-1 || this.n < 0 {
this.n = 0
} else {
this.n = this.n + 1

@ -333,7 +333,7 @@ func(this *SearchProcess) Peek() *SearchIndexer {
return s
}

func(this *SearchProcess) Reset() {
func (this *SearchProcess) Reset() {
this.mu.Lock()
for i := range this.idx {
this.idx[i].DB.Close()

@ -355,22 +355,22 @@ type SearchIndexer struct {
}

func NewSearchIndexer(id string, b IBackend) SearchIndexer {
s := SearchIndexer {
DBPath: filepath.Join(GetCurrentDir(), FTS_PATH, "fts_" + id + ".sql"),
Id: id,
Backend: b,
s := SearchIndexer{
DBPath: filepath.Join(GetCurrentDir(), FTS_PATH, "fts_"+id+".sql"),
Id: id,
Backend: b,
FoldersUnknown: make(HeapDoc, 0, 1),
}
heap.Init(&s.FoldersUnknown)

db, err := sql.Open("sqlite3", s.DBPath + "?_journal_mode=wal")
db, err := sql.Open("sqlite3", s.DBPath+"?_journal_mode=wal")
if err != nil {
Log.Warning("search::init can't open database (%v)", err)
return s
}
s.DB = db
queryDB := func(sqlQuery string) error {
stmt, err := db.Prepare(sqlQuery);
stmt, err := db.Prepare(sqlQuery)
if err != nil {
Log.Warning("search::initschema prepare schema error(%v)", err)
return err

@ -407,7 +407,7 @@ func NewSearchIndexer(id string, b IBackend) SearchIndexer {
return s
}

func(this *SearchIndexer) Execute(){
func (this *SearchIndexer) Execute() {
if this.CurrentPhase == "" {
time.Sleep(1 * time.Second)
this.CurrentPhase = PHASE_EXPLORE

@ -449,7 +449,7 @@ func(this *SearchIndexer) Execute(){
return
}

func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
func (this *SearchIndexer) Discover(tx *sql.Tx) bool {
if this.FoldersUnknown.Len() == 0 {
this.CurrentPhase = PHASE_INDEXING
return false

@ -478,7 +478,7 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
}
hasher := fnv.New32()
hasher.Write([]byte(strconv.Itoa(len(files))))
for i:=0; i<len(files); i = i+step {
for i := 0; i < len(files); i = i + step {
hasher.Write([]byte(files[i].Name()))
}
return base64.StdEncoding.EncodeToString(hasher.Sum(nil))

@ -487,7 +487,7 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
return true
}
this.lastHash = ""
for i:=0; i<this.FoldersUnknown.Len(); i++ {
for i := 0; i < this.FoldersUnknown.Len(); i++ {
if this.FoldersUnknown[i].Hash == hashFiles && filepath.Base(doc.Path) != filepath.Base(this.FoldersUnknown[i].Path) {
this.lastHash = hashFiles
return true

@ -505,7 +505,7 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
if err = this.dbInsert(doc.Path, f, tx); err == nil {
performPush = true
} else if e, ok := err.(sqlite3.Error); ok && e.Code == sqlite3.ErrConstraint {
performPush = func(path string) bool{
performPush = func(path string) bool {
var t string
var err error
if err := tx.QueryRow("SELECT indexTime FROM file WHERE path = ?", p).Scan(&t); err != nil {

@ -517,7 +517,7 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
Log.Warning("search::discovery invalid_time (%v)", err)
return false
}
if time.Now().Add(time.Duration(- SEARCH_REINDEX()) * time.Hour).Before(tm) {
if time.Now().Add(time.Duration(-SEARCH_REINDEX()) * time.Hour).Before(tm) {
return false
}
if _, err = tx.Exec("UPDATE file SET indexTime = ? WHERE path = ?", time.Now(), p); err != nil {
@ -531,12 +531,12 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
}
if performPush == true {
heap.Push(&this.FoldersUnknown, &Document{
Type: "directory",
Name: name,
Path: p,
Size: f.Size(),
Type: "directory",
Name: name,
Path: p,
Size: f.Size(),
ModTime: f.ModTime(),
Hash: hashFiles,
Hash: hashFiles,
})
}
} else {

@ -552,16 +552,16 @@ func(this *SearchIndexer) Discover(tx *sql.Tx) bool {
return true
}

func(this *SearchIndexer) Indexing(tx *sql.Tx) bool {
func (this *SearchIndexer) Indexing(tx *sql.Tx) bool {
ext := strings.Split(INDEXING_EXT(), ",")
for i:=0; i<len(ext); i++ {
for i := 0; i < len(ext); i++ {
ext[i] = "'" + strings.TrimSpace(ext[i]) + "'"
}

rows, err := tx.Query(
"SELECT path FROM file WHERE (" +
" type = 'file' AND size < ? AND filetype IN (" + strings.Join(ext, ",") +") AND indexTime IS NULL " +
") LIMIT 2",
"SELECT path FROM file WHERE ("+
" type = 'file' AND size < ? AND filetype IN ("+strings.Join(ext, ",")+") AND indexTime IS NULL "+
") LIMIT 2",
MAX_INDEXING_FSIZE(),
)
if err != nil {

@ -590,12 +590,12 @@ func(this *SearchIndexer) Indexing(tx *sql.Tx) bool {
return true
}

func(this *SearchIndexer) updateFile(path string, tx *sql.Tx) error {
func (this *SearchIndexer) updateFile(path string, tx *sql.Tx) error {
if _, err := tx.Exec("UPDATE file SET indexTime = ? WHERE path = ?", time.Now(), path); err != nil {
return err
}

for i:=0; i<len(INDEXING_EXCLUSION); i++ {
for i := 0; i < len(INDEXING_EXCLUSION); i++ {
if strings.Contains(path, INDEXING_EXCLUSION[i]) {
return nil
}

@ -611,16 +611,26 @@ func(this *SearchIndexer) updateFile(path string, tx *sql.Tx) error {
defer reader.Close()

switch GetMimeType(path) {
case "text/plain": reader, err = formater.TxtFormater(reader)
case "text/org": reader, err = formater.TxtFormater(reader)
case "text/markdown": reader, err = formater.TxtFormater(reader)
case "application/x-form": reader, err = formater.TxtFormater(reader)
case "application/pdf": reader, err = formater.PdfFormater(reader)
case "application/powerpoint": reader, err = formater.OfficeFormater(reader)
case "application/vnd.ms-powerpoint": reader, err = formater.OfficeFormater(reader)
case "application/word": reader, err = formater.OfficeFormater(reader)
case "application/msword": reader, err = formater.OfficeFormater(reader)
default: return nil
case "text/plain":
reader, err = formater.TxtFormater(reader)
case "text/org":
reader, err = formater.TxtFormater(reader)
case "text/markdown":
reader, err = formater.TxtFormater(reader)
case "application/x-form":
reader, err = formater.TxtFormater(reader)
case "application/pdf":
reader, err = formater.PdfFormater(reader)
case "application/powerpoint":
reader, err = formater.OfficeFormater(reader)
case "application/vnd.ms-powerpoint":
reader, err = formater.OfficeFormater(reader)
case "application/word":
reader, err = formater.OfficeFormater(reader)
case "application/msword":
reader, err = formater.OfficeFormater(reader)
default:
return nil
}

if err != nil {
@ -638,12 +648,12 @@ func(this *SearchIndexer) updateFile(path string, tx *sql.Tx) error {
return nil
}

func(this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {
func (this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {
if _, err := tx.Exec("UPDATE file SET indexTime = ? WHERE path = ?", time.Now(), path); err != nil {
return err
}

for i:=0; i<len(INDEXING_EXCLUSION); i++ {
for i := 0; i < len(INDEXING_EXCLUSION); i++ {
if strings.Contains(path, INDEXING_EXCLUSION[i]) {
return nil
}

@ -652,7 +662,7 @@ func(this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {
// Fetch list of folders as in the remote filesystem
currFiles, err := this.Backend.Ls(path)
if err != nil {
tx.Exec("DELETE FROM file WHERE path >= ? AND path < ?", path, path + "~")
tx.Exec("DELETE FROM file WHERE path >= ? AND path < ?", path, path+"~")
return err
}

@ -671,10 +681,10 @@ func(this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {

// Perform the DB operation to ensure previousFiles and currFiles are in sync
// 1. Find the content that have been created and did not exist before
for i:=0; i<len(currFiles); i++ {
for i := 0; i < len(currFiles); i++ {
currFilenameAlreadyExist := false
currFilename := currFiles[i].Name()
for j:=0; j<len(previousFiles); j++ {
for j := 0; j < len(previousFiles); j++ {
if currFilename == previousFiles[j].Name() {
if currFiles[i].Size() != previousFiles[j].Size() {
err = this.dbUpdate(path, currFiles[i], tx)

@ -692,10 +702,10 @@ func(this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {
}
}
// 2. Find the content that was existing before but got removed
for i:=0; i<len(previousFiles); i++ {
for i := 0; i < len(previousFiles); i++ {
previousFilenameStillExist := false
previousFilename := previousFiles[i].Name()
for j:=0; j<len(currFiles); j++ {
for j := 0; j < len(currFiles); j++ {
if previousFilename == currFiles[j].Name() {
previousFilenameStillExist = true
break

@ -708,10 +718,10 @@ func(this *SearchIndexer) updateFolder(path string, tx *sql.Tx) error {
return nil
}

func(this *SearchIndexer) Consolidate(tx *sql.Tx) bool {
func (this *SearchIndexer) Consolidate(tx *sql.Tx) bool {
rows, err := tx.Query(
"SELECT path, type FROM file WHERE indexTime < ? ORDER BY indexTime DESC LIMIT 5",
time.Now().Add(- time.Duration(SEARCH_REINDEX()) * time.Hour),
time.Now().Add(-time.Duration(SEARCH_REINDEX())*time.Hour),
)
if err != nil {
if err == sql.ErrNoRows {
@ -744,16 +754,16 @@ func(this *SearchIndexer) Consolidate(tx *sql.Tx) bool {
return true
}

func(this *SearchIndexer) dbInsert(parent string, f os.FileInfo, tx *sql.Tx) error {
func (this *SearchIndexer) dbInsert(parent string, f os.FileInfo, tx *sql.Tx) error {
var name string = f.Name()
var err error
path := filepath.Join(parent, name)

if f.IsDir() {
_, err = tx.Exec(
"INSERT INTO file(path, parent, filename, type, size, modTime, indexTime) " +
"VALUES(?, ?, ?, ?, ?, ?, ?)",
path + "/",
"INSERT INTO file(path, parent, filename, type, size, modTime, indexTime) "+
"VALUES(?, ?, ?, ?, ?, ?, ?)",
path+"/",
parent,
name,
"directory",

@ -763,8 +773,8 @@ func(this *SearchIndexer) dbInsert(parent string, f os.FileInfo, tx *sql.Tx) err
)
} else {
_, err = tx.Exec(
"INSERT INTO file(path, parent, filename, type, size, modTime, indexTime, filetype) " +
"VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
"INSERT INTO file(path, parent, filename, type, size, modTime, indexTime, filetype) "+
"VALUES(?, ?, ?, ?, ?, ?, ?, ?)",
path,
parent,
name,

@ -778,7 +788,7 @@ func(this *SearchIndexer) dbInsert(parent string, f os.FileInfo, tx *sql.Tx) err
return err
}

func(this *SearchIndexer) dbUpdate(parent string, f os.FileInfo, tx *sql.Tx) error {
func (this *SearchIndexer) dbUpdate(parent string, f os.FileInfo, tx *sql.Tx) error {
path := filepath.Join(parent, f.Name())
if f.IsDir() {
path += "/"

@ -790,14 +800,14 @@ func(this *SearchIndexer) dbUpdate(parent string, f os.FileInfo, tx *sql.Tx) err
return err
}

func(this *SearchIndexer) dbDelete(parent string, f os.FileInfo, tx *sql.Tx) error {
func (this *SearchIndexer) dbDelete(parent string, f os.FileInfo, tx *sql.Tx) error {
path := filepath.Join(parent, f.Name())
if f.IsDir() {
path += "/"
}
_, err := tx.Exec(
"DELETE FROM file WHERE path >= ? AND path < ?",
path, path + "~",
path, path+"~",
)
return err
}

@ -817,8 +827,9 @@ type Document struct {

// https://golang.org/pkg/container/heap/
type HeapDoc []*Document
func(h HeapDoc) Len() int { return len(h) }
func(h HeapDoc) Less(i, j int) bool {

func (h HeapDoc) Len() int { return len(h) }
func (h HeapDoc) Less(i, j int) bool {
if h[i].Priority != 0 || h[j].Priority != 0 {
return h[i].Priority < h[j].Priority
}

@ -826,7 +837,7 @@ func(h HeapDoc) Less(i, j int) bool {
scoreB := len(strings.Split(h[j].Path, "/")) / len(strings.Split(h[j].InitialPath, "/"))
return scoreA < scoreB
}
func(h HeapDoc) Swap(i, j int) {
func (h HeapDoc) Swap(i, j int) {
a := h[i]
h[i] = h[j]
h[j] = a
@ -39,7 +39,7 @@ func scoreBoostForPath(p string) int {

func scoreBoostForFilesInDirectory(f []os.FileInfo) int {
s := 0
for i:=0; i<len(f); i++ {
for i := 0; i < len(f); i++ {
name := f[i].Name()
if f[i].IsDir() == false {
if strings.HasSuffix(name, ".org") {

@ -62,7 +62,7 @@ func scoreBoostForFilesInDirectory(f []os.FileInfo) int {
}

func scoreBoostOnDepth(p string) int {
return - strings.Count(p, "/")
return -strings.Count(p, "/")
}

func SearchStateLess(app *App, path string, keyword string) []File {

@ -70,7 +70,7 @@ func SearchStateLess(app *App, path string, keyword string) []File {
toVisit := []PathQuandidate{PathQuandidate{path, 0}}
MAX_SEARCH_TIME := SEARCH_TIMEOUT()

for start := time.Now() ; time.Since(start) < MAX_SEARCH_TIME; {
for start := time.Now(); time.Since(start) < MAX_SEARCH_TIME; {
if len(toVisit) == 0 {
return files
}

@ -88,11 +88,11 @@ func SearchStateLess(app *App, path string, keyword string) []File {
}

score1 := scoreBoostForFilesInDirectory(f)
for i:=0; i<len(f); i++ {
for i := 0; i < len(f); i++ {
name := f[i].Name()
// keyword matching
isAMatch := true
for _, key := range strings.Split(keyword, " "){
for _, key := range strings.Split(keyword, " ") {
if strings.Contains(strings.ToLower(name), strings.ToLower(key)) == false {
isAMatch = false
}

@ -124,17 +124,17 @@ func SearchStateLess(app *App, path string, keyword string) []File {
score += score1
score += score2
score += currentPath.Score
t := make([]PathQuandidate, len(toVisit) + 1)
t := make([]PathQuandidate, len(toVisit)+1)
k := 0
for k=0; k<len(toVisit); k++{
for k = 0; k < len(toVisit); k++ {
if score > toVisit[k].Score {
break
}
t[k] = toVisit[k]
}
t[k] = PathQuandidate{fullpath, score}
for k=k+1; k<len(toVisit) + 1; k++ {
t[k] = toVisit[k - 1]
for k = k + 1; k < len(toVisit)+1; k++ {
t[k] = toVisit[k-1]
}
toVisit = t
}
@ -1,16 +1,16 @@
package model

import (
. "github.com/mickael-kerjean/filestash/server/common"
"bytes"
"crypto/tls"
"database/sql"
"encoding/json"
"github.com/mattn/go-sqlite3"
. "github.com/mickael-kerjean/filestash/server/common"
"golang.org/x/crypto/bcrypt"
"gopkg.in/gomail.v2"
"net/http"
"html/template"
"net/http"
"strings"
"time"
)

@ -95,26 +95,26 @@ func ShareUpsert(p *Share) error {
return err
}
j, _ := json.Marshal(&struct {
Password *string `json:"password,omitempty"`
Users *string `json:"users,omitempty"`
Expire *int64 `json:"expire,omitempty"`
Url *string `json:"url,omitempty"`
CanShare bool `json:"can_share"`
CanManageOwn bool `json:"can_manage_own"`
CanRead bool `json:"can_read"`
CanWrite bool `json:"can_write"`
CanUpload bool `json:"can_upload"`
}{
Password: p.Password,
Users: p.Users,
Expire: p.Expire,
Url: p.Url,
CanShare: p.CanShare,
Password *string `json:"password,omitempty"`
Users *string `json:"users,omitempty"`
Expire *int64 `json:"expire,omitempty"`
Url *string `json:"url,omitempty"`
CanShare bool `json:"can_share"`
CanManageOwn bool `json:"can_manage_own"`
CanRead bool `json:"can_read"`
CanWrite bool `json:"can_write"`
CanUpload bool `json:"can_upload"`
}{
Password: p.Password,
Users: p.Users,
Expire: p.Expire,
Url: p.Url,
CanShare: p.CanShare,
CanManageOwn: p.CanManageOwn,
CanRead: p.CanRead,
CanWrite: p.CanWrite,
CanUpload: p.CanUpload,
})
CanRead: p.CanRead,
CanWrite: p.CanWrite,
CanUpload: p.CanUpload,
})
_, err = stmt.Exec(p.Id, p.Backend, p.Path, j, p.Auth)
return err
}
@ -136,7 +136,7 @@ func ShareProofVerifier(s Share, proof Proof) (Proof, error) {
return p, NewError("No password required", 400)
}

v, ok := ShareProofVerifierPassword(*s.Password, proof.Value);
v, ok := ShareProofVerifierPassword(*s.Password, proof.Value)
if ok == false {
time.Sleep(1000 * time.Millisecond)
return p, ErrInvalidPassword

@ -157,12 +157,12 @@ func ShareProofVerifier(s Share, proof Proof) (Proof, error) {
user := v

// prepare the verification code
stmt, err := DB.Prepare("INSERT INTO Verification(key, code) VALUES(?, ?)");
stmt, err := DB.Prepare("INSERT INTO Verification(key, code) VALUES(?, ?)")
if err != nil {
return p, err
}
code := RandomString(4)
if _, err := stmt.Exec("email::" + user, code); err != nil {
if _, err := stmt.Exec("email::"+user, code); err != nil {
return p, err
}

@ -170,7 +170,7 @@ func ShareProofVerifier(s Share, proof Proof) (Proof, error) {
var b bytes.Buffer
t := template.New("email")
t.Parse(TmplEmailVerification())
t.Execute(&b, struct{
t.Execute(&b, struct {
Code string
Username string
}{code, networkDriveUsernameEnc(user)})

@ -188,10 +188,10 @@ func ShareProofVerifier(s Share, proof Proof) (Proof, error) {
From string `json:"from"`
}{
Hostname: Config.Get("email.server").String(),
Port: Config.Get("email.port").Int(),
Port: Config.Get("email.port").Int(),
Username: Config.Get("email.username").String(),
Password: Config.Get("email.password").String(),
From: Config.Get("email.from").String(),
From: Config.Get("email.from").String(),
}

m := gomail.NewMessage()

@ -308,7 +308,7 @@ func ShareProofCalculateRemainings(ref []Proof, mem []Proof) []Proof {
for j := 0; j < len(mem); j++ {
if shareProofAreEquivalent(ref[i], mem[j]) {
keep = false
break;
break
}
}
if keep {

@ -319,8 +319,7 @@ func ShareProofCalculateRemainings(ref []Proof, mem []Proof) []Proof {
return remainingProof
}


func shareProofAreEquivalent(ref Proof, p Proof) bool {
func shareProofAreEquivalent(ref Proof, p Proof) bool {
if ref.Key != p.Key {
return false
} else if ref.Value != "" && ref.Value == p.Value {

@ -328,7 +327,7 @@ func shareProofAreEquivalent(ref Proof, p Proof) bool {
}
for _, chunk := range strings.Split(ref.Value, ",") {
chunk = strings.Trim(chunk, " ")
if p.Id == Hash(ref.Key + "::" + chunk, 20) {
if p.Id == Hash(ref.Key+"::"+chunk, 20) {
return true
}
}

@ -634,5 +633,5 @@ func TmplEmailVerification() string {
}

func networkDriveUsernameEnc(email string) string {
return email + "[" + Hash(email + SECRET_KEY_DERIVATE_FOR_HASH, 10) + "]"
return email + "[" + Hash(email+SECRET_KEY_DERIVATE_FOR_HASH, 10) + "]"
}
@ -12,8 +12,8 @@ import (
"fmt"
. "github.com/mickael-kerjean/filestash/server/common"
"github.com/mickael-kerjean/net/webdav"
"net/http"
"io"
"net/http"
"os"
"path/filepath"
"strings"

@ -21,6 +21,7 @@ import (
)

const DAVCachePath = "data/cache/webdav/"

var (
cachePath string
webdavCache AppCache

@ -63,10 +64,10 @@ func (this WebdavFs) Mkdir(ctx context.Context, name string, perm os.FileMode) e
}

func (this *WebdavFs) OpenFile(ctx context.Context, name string, flag int, perm os.FileMode) (webdav.File, error) {
cachePath := fmt.Sprintf("%stmp_%s", cachePath, Hash(this.id + name, 20))
cachePath := fmt.Sprintf("%stmp_%s", cachePath, Hash(this.id+name, 20))
fwriteFile := func() *os.File {
if this.req.Method == "PUT" {
f, err := os.OpenFile(cachePath+"_writer", os.O_WRONLY|os.O_CREATE|os.O_EXCL, os.ModePerm);
f, err := os.OpenFile(cachePath+"_writer", os.O_WRONLY|os.O_CREATE|os.O_EXCL, os.ModePerm)
if err != nil {
return nil
}

@ -82,10 +83,10 @@ func (this *WebdavFs) OpenFile(ctx context.Context, name string, flag int, perm
return nil, os.ErrNotExist
}
this.webdavFile = &WebdavFile{
path: name,
path: name,
backend: this.backend,
cache: cachePath,
fwrite: fwriteFile(),
cache: cachePath,
fwrite: fwriteFile(),
}
return this.webdavFile, nil
}

@ -116,9 +117,9 @@ func (this *WebdavFs) Stat(ctx context.Context, name string) (os.FileInfo, error
return nil, os.ErrNotExist
}
this.webdavFile = &WebdavFile{
path: fullname,
path: fullname,
backend: this.backend,
cache: fmt.Sprintf("%stmp_%s", cachePath, Hash(this.id + name, 20)),
cache: fmt.Sprintf("%stmp_%s", cachePath, Hash(this.id+name, 20)),
}
return this.webdavFile.Stat()
}

@ -134,7 +135,6 @@ func (this WebdavFs) fullpath(path string) string {
return p
}


/*
* Implement a webdav.File and os.Stat : https://godoc.org/golang.org/x/net/webdav#File
*/

@ -177,7 +177,7 @@ func (this *WebdavFile) Close() error {

func (this *WebdavFile) Seek(offset int64, whence int) (int64, error) {
if this.fread == nil {
this.fread = this.pull_remote_file();
this.fread = this.pull_remote_file()
if this.fread == nil {
return offset, ErrNotFound
}

@ -239,7 +239,7 @@ func (this *WebdavFile) Write(p []byte) (int, error) {
}

func (this WebdavFile) pull_remote_file() *os.File {
filename := this.cache+"_reader"
filename := this.cache + "_reader"
if f, err := os.OpenFile(filename, os.O_RDONLY, os.ModePerm); err == nil {
return f
}
@ -247,7 +247,7 @@ func (this WebdavFile) pull_remote_file() *os.File {
if reader, err := this.backend.Cat(this.path); err == nil {
io.Copy(f, reader)
f.Close()
webdavCache.SetKey(this.cache + "_reader", nil)
webdavCache.SetKey(this.cache+"_reader", nil)
reader.Close()
if f, err = os.OpenFile(filename, os.O_RDONLY, os.ModePerm); err == nil {
return f

@ -264,15 +264,15 @@ func (this *WebdavFile) push_to_remote_if_needed() error {
return nil
}
this.fwrite.Close()
f, err := os.OpenFile(this.cache + "_writer", os.O_RDONLY, os.ModePerm);
f, err := os.OpenFile(this.cache+"_writer", os.O_RDONLY, os.ModePerm)
if err != nil {
return err
}
err = this.backend.Save(this.path, f)
if err == nil {
if err = os.Rename(this.cache + "_writer", this.cache + "_reader"); err == nil {
if err = os.Rename(this.cache+"_writer", this.cache+"_reader"); err == nil {
this.fwrite = nil
webdavCache.SetKey(this.cache + "_reader", nil)
webdavCache.SetKey(this.cache+"_reader", nil)
}
}
f.Close()

@ -329,6 +329,7 @@ func (this WebdavFile) ETag(ctx context.Context) (string, error) {
}

var lock webdav.LockSystem

func NewWebdavLock() webdav.LockSystem {
if lock == nil {
lock = webdav.NewMemLS()
@ -1,25 +1,25 @@
package plugin

import (
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_starter_http"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_starter_tor"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_handler_console"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_video_transcoder"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_editor_onlyoffice"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_handler_syncthing"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_image_light"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_ftp"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_backblaze"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_dav"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_webdav"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_dropbox"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_ftp"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_ldap"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_mysql"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_s3"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_ldap"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_dropbox"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_webdav"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_editor_onlyoffice"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_handler_console"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_handler_syncthing"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_image_light"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_starter_http"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_starter_tor"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_video_transcoder"
//_ "github.com/mickael-kerjean/filestash/server/plugin/plg_backend_samba"
. "github.com/mickael-kerjean/filestash/server/common"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_security_scanner"
_ "github.com/mickael-kerjean/filestash/server/plugin/plg_security_svg"
. "github.com/mickael-kerjean/filestash/server/common"
)

func init() {
@ -3,8 +3,8 @@ package plg_backend_backblaze
import (
"bytes"
"crypto/sha1"
"encoding/json"
"encoding/base64"
"encoding/json"
"fmt"
. "github.com/mickael-kerjean/filestash/server/common"
"io"

@ -26,11 +26,11 @@ var (
type Backblaze struct {
params map[string]string
Buckets map[string]string
ApiUrl string `json:"apiUrl"`
DownloadUrl string `json:"downloadUrl"`
AccountId string `json:"accountId"`
Token string `json:"authorizationToken"`
Status int `json:"status"`
ApiUrl string `json:"apiUrl"`
DownloadUrl string `json:"downloadUrl"`
AccountId string `json:"accountId"`
Token string `json:"authorizationToken"`
Status int `json:"status"`
}

type BackblazeError struct {

@ -57,7 +57,7 @@ func (this Backblaze) Init(params map[string]string, app *App) (IBackend, error)
}

// To perform some query, we need to first know things like where we will have to query, get a token, ...
res, err := this.request("GET", "https://api.backblazeb2.com/b2api/v2/b2_authorize_account", nil, nil);
res, err := this.request("GET", "https://api.backblazeb2.com/b2api/v2/b2_authorize_account", nil, nil)
if err != nil {
return nil, err
}

@ -74,7 +74,7 @@ func (this Backblaze) Init(params map[string]string, app *App) (IBackend, error)
// BucketId is just some internal ref as people expect to see the bucketName
res, err = this.request(
"POST",
this.ApiUrl + "/b2api/v2/b2_list_buckets",
this.ApiUrl+"/b2api/v2/b2_list_buckets",
strings.NewReader(fmt.Sprintf(
`{"accountId":"%s"}`,
this.AccountId,

@ -90,7 +90,7 @@ func (this Backblaze) Init(params map[string]string, app *App) (IBackend, error)
return nil, err
}
var buckets struct {
Buckets []struct{
Buckets []struct {
BucketId string `json:"bucketId"`
BucketName string `json:"bucketName"`
} `json:"buckets"`
@ -111,9 +111,9 @@ func (this Backblaze) LoginForm() Form {
|
|||
return Form{
|
||||
Elmnts: []FormElement{
|
||||
FormElement{
|
||||
Name: "type",
|
||||
Type: "hidden",
|
||||
Value: "backblaze",
|
||||
Name: "type",
|
||||
Type: "hidden",
|
||||
Value: "backblaze",
|
||||
},
|
||||
FormElement{
|
||||
Name: "username",
|
||||
|
|
@ -148,10 +148,10 @@ func (this Backblaze) Ls(path string) ([]os.FileInfo, error) {
|
|||
Delimiter string `json:"delimiter"`
|
||||
MaxFileCount int `json:"maxFileCount"`
|
||||
Prefix string `json:"prefix"`
|
||||
}{ p.BucketId, "/", 10000, p.Prefix })
|
||||
}{p.BucketId, "/", 10000, p.Prefix})
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_list_file_names",
|
||||
this.ApiUrl+"/b2api/v2/b2_list_file_names",
|
||||
bytes.NewReader(reqJSON),
|
||||
nil,
|
||||
)
|
||||
|
|
@ -194,7 +194,7 @@ func (this Backblaze) Ls(path string) ([]os.FileInfo, error) {
|
|||
func (this Backblaze) Cat(path string) (io.ReadCloser, error) {
|
||||
res, err := this.request(
|
||||
"GET",
|
||||
this.DownloadUrl + "/file" + path + "?Authorization=" + this.Token,
|
||||
this.DownloadUrl+"/file"+path+"?Authorization="+this.Token,
|
||||
nil, nil,
|
||||
)
|
||||
if err != nil {
|
||||
|
|
@ -216,7 +216,7 @@ func (this Backblaze) Mkdir(path string) error {
|
|||
}
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_create_bucket",
|
||||
this.ApiUrl+"/b2api/v2/b2_create_bucket",
|
||||
strings.NewReader(fmt.Sprintf(
|
||||
`{"accountId": "%s", "bucketName": "%s", "bucketType": "allPrivate"}`,
|
||||
this.AccountId,
|
||||
|
|
@ -254,7 +254,7 @@ func (this Backblaze) Rm(path string) error {
|
|||
BackblazeCache.Del(this.params) // cache invalidation
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_delete_bucket",
|
||||
this.ApiUrl+"/b2api/v2/b2_delete_bucket",
|
||||
strings.NewReader(fmt.Sprintf(
|
||||
`{"accountId": "%s", "bucketId": "%s"}`,
|
||||
this.AccountId,
|
||||
|
|
@ -284,7 +284,7 @@ func (this Backblaze) Rm(path string) error {
|
|||
// Step 1: find every files in a folder: b2_list_file_names
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_list_file_names",
|
||||
this.ApiUrl+"/b2api/v2/b2_list_file_names",
|
||||
strings.NewReader(fmt.Sprintf(
|
||||
`{"bucketId": "%s", "maxFileCount": 10000, "delimiter": "/", "prefix": "%s"}`,
|
||||
p.BucketId, p.Prefix,
|
||||
|
|
@ -312,7 +312,7 @@ func (this Backblaze) Rm(path string) error {
|
|||
for i := range bRes.Files {
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_delete_file_version",
|
||||
this.ApiUrl+"/b2api/v2/b2_delete_file_version",
|
||||
strings.NewReader(fmt.Sprintf(
|
||||
`{"fileName": "%s", "fileId": "%s"}`,
|
||||
bRes.Files[i].FileName, bRes.Files[i].FileId,
|
||||
|
|
@ -347,7 +347,7 @@ func (this Backblaze) Touch(path string) error {
|
|||
// Step 1: get the URL we will proceed to the upload
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_get_upload_url",
|
||||
this.ApiUrl+"/b2api/v2/b2_get_upload_url",
|
||||
strings.NewReader(fmt.Sprintf(`{"bucketId": "%s"}`, p.BucketId)),
|
||||
nil,
|
||||
)
|
||||
|
|
@ -372,7 +372,7 @@ func (this Backblaze) Touch(path string) error {
|
|||
"POST",
|
||||
resBody.UploadUrl,
|
||||
nil,
|
||||
func(r *http.Request){
|
||||
func(r *http.Request) {
|
||||
r.Header.Set("Authorization", resBody.Token)
|
||||
r.Header.Set("X-Bz-File-Name", url.QueryEscape(p.Prefix))
|
||||
r.Header.Set("Content-Type", "application/octet-stream")
|
||||
|
|
@ -404,7 +404,7 @@ func (this Backblaze) Save(path string, file io.Reader) error {
|
|||
// Step 1: get the URL we will proceed to the upload
|
||||
res, err := this.request(
|
||||
"POST",
|
||||
this.ApiUrl + "/b2api/v2/b2_get_upload_url",
|
||||
this.ApiUrl+"/b2api/v2/b2_get_upload_url",
|
||||
strings.NewReader(fmt.Sprintf(`{"bucketId": "%s"}`, p.BucketId)),
|
||||
nil,
|
||||
)
|
||||
|
|
@ -431,15 +431,17 @@ func (this Backblaze) Save(path string, file io.Reader) error {
|
|||
Sha1 []byte
|
||||
}{}
|
||||
backblazeFileDetail.path = GetAbsolutePath(BackblazeCachePath + "data_" + QuickString(20) + ".dat")
|
||||
f, err := os.OpenFile(backblazeFileDetail.path, os.O_CREATE | os.O_RDWR, os.ModePerm)
|
||||
f, err := os.OpenFile(backblazeFileDetail.path, os.O_CREATE|os.O_RDWR, os.ModePerm)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
defer os.Remove(backblazeFileDetail.path)
|
||||
io.Copy(f, file)
|
||||
if obj, ok := file.(io.Closer); ok { obj.Close() }
|
||||
s, err := f.Stat();
|
||||
if obj, ok := file.(io.Closer); ok {
|
||||
obj.Close()
|
||||
}
|
||||
s, err := f.Stat()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
@ -457,7 +459,7 @@ func (this Backblaze) Save(path string, file io.Reader) error {
|
|||
"POST",
|
||||
resBody.UploadUrl,
|
||||
f,
|
||||
func(r *http.Request){
|
||||
func(r *http.Request) {
|
||||
r.ContentLength = backblazeFileDetail.ContentLength
|
||||
r.Header.Set("Authorization", resBody.Token)
|
||||
r.Header.Set("X-Bz-File-Name", url.QueryEscape(p.Prefix))
|
||||
|
|
@ -495,7 +497,7 @@ func (this Backblaze) Meta(path string) Metadata {
|
|||
return m
|
||||
}
|
||||
|
||||
func (this Backblaze) request(method string, url string, body io.Reader, fn func(req *http.Request)) (*http.Response, error){
|
||||
func (this Backblaze) request(method string, url string, body io.Reader, fn func(req *http.Request)) (*http.Response, error) {
|
||||
req, err := http.NewRequest(method, url, body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
|
@ -514,7 +516,7 @@ func (this Backblaze) request(method string, url string, body io.Reader, fn func
|
|||
} else {
|
||||
req.Header.Set("Authorization", this.Token)
|
||||
}
|
||||
req.Header.Set("User-Agent", "Filestash " + APP_VERSION + "." + BUILD_DATE)
|
||||
req.Header.Set("User-Agent", "Filestash "+APP_VERSION+"."+BUILD_DATE)
|
||||
req.Header.Set("Accept", "application/json")
|
||||
//req.Header.Set("X-Bz-Test-Mode", "force_cap_exceeded")
|
||||
if fn != nil {
|
||||
|
|
|
|||
|
|
@@ -25,14 +25,14 @@ const (
func init() {
DavCache = NewAppCache(2, 1)
Backend.Register(CARDDAV, Dav{})
Backend.Register(CALDAV, Dav{})
Backend.Register(CALDAV, Dav{})
}

type Dav struct {
which string
url string
params map[string]string
cache map[string]interface{}
which string
url string
params map[string]string
cache map[string]interface{}
}

func (this Dav) Init(params map[string]string, app *App) (IBackend, error) {
@@ -41,9 +41,9 @@ func (this Dav) Init(params map[string]string, app *App) (IBackend, error) {
return backend, nil
}
backend := Dav{
url: params["url"],
which: params["type"],
params: params,
url: params["url"],
which: params["type"],
params: params,
}
DavCache.Set(params, &backend)
return backend, nil
@@ -53,9 +53,9 @@ func (this Dav) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: this.which,
Name: "type",
Type: "hidden",
Value: this.which,
},
FormElement{
Name: "url",
@@ -296,9 +296,9 @@ func (this Dav) Touch(path string) error {

func (this Dav) Save(path string, file io.Reader) error {
var uriInit string
var uri string
var err error
var res *http.Response
var uri string
var err error
var res *http.Response

if uriInit, err = this.getResourceURI(path); err != nil {
uriInit = ""
@@ -340,21 +340,21 @@ func (this Dav) Save(path string, file io.Reader) error {

func (this Dav) Meta(path string) Metadata {
m := Metadata{
CanMove: NewBool(false),
HideExtension: NewBool(true),
CanMove: NewBool(false),
HideExtension: NewBool(true),
}
if path == "/" {
m.CanCreateFile = NewBool(false)
m.CanCreateFile = NewBool(false)
m.CanCreateDirectory = NewBool(true)
m.CanRename = NewBool(false)
m.CanUpload = NewBool(false)
m.RefreshOnCreate = NewBool(false)
m.CanRename = NewBool(false)
m.CanUpload = NewBool(false)
m.RefreshOnCreate = NewBool(false)
} else {
m.CanCreateFile = NewBool(true)
m.CanCreateFile = NewBool(true)
m.CanCreateDirectory = NewBool(false)
m.CanRename = NewBool(true)
m.CanUpload = NewBool(true)
m.RefreshOnCreate = NewBool(true)
m.CanRename = NewBool(true)
m.CanUpload = NewBool(true)
m.RefreshOnCreate = NewBool(true)
}
return m
}
@@ -448,7 +448,7 @@ func (this Dav) getCollections() ([]DavCollection, error) {
<getcontenttype/>
<displayname />
</prop>
</propfind>`), func (req *http.Request) {
</propfind>`), func(req *http.Request) {
req.Header.Add("Depth", "1")
req.Header.Add("Content-Type", "application/xml")
}); err != nil {
@@ -497,7 +497,7 @@ func (this Dav) getCollectionURI(path string) (string, error) {
if err != nil {
return "", err
}
for i:=0; i<len(coll); i++ {
for i := 0; i < len(coll); i++ {
if coll[i].Name == string(p[0]) {
return this.parseURL(coll[i].Url)
}
@@ -536,7 +536,7 @@ func (this Dav) getResources(path string) ([]DavResource, error) {
</D:prop>
</C:calendar-query>`
}
return strings.NewReader(query);
return strings.NewReader(query)
}(),
func(req *http.Request) {
req.Header.Add("Depth", "1")
@@ -569,7 +569,7 @@ func (this Dav) getResources(path string) ([]DavResource, error) {
name += ".vcf"
} else if this.which == CALDAV {
strToInt := func(chunk string) int {
ret, _ := strconv.Atoi(chunk);
ret, _ := strconv.Atoi(chunk)
return ret
}
for _, line := range strings.Split(r.Responses[i].Ical, "\n") {
@@ -581,7 +581,7 @@ func (this Dav) getResources(path string) ([]DavResource, error) {
c := strings.TrimSuffix(strings.TrimSpace(strings.TrimPrefix(line, "DTSTART:")), "Z")
if len(c) == 15 && t == 0 {
t = time.Date(
strToInt(c[0:4]), time.Month(strToInt(c[4:6]) + 1), strToInt(c[6:8]), // date
strToInt(c[0:4]), time.Month(strToInt(c[4:6])+1), strToInt(c[6:8]), // date
strToInt(c[9:11]), strToInt(c[11:13]), strToInt(c[13:15]), // time
0, time.UTC,
).Unix()
@@ -590,8 +590,8 @@ func (this Dav) getResources(path string) ([]DavResource, error) {
c := strings.TrimSpace(strings.TrimPrefix(line, "DTSTART;VALUE=DATE:"))
if len(c) == 8 && t == 0 {
t = time.Date(
strToInt(c[0:4]), time.Month(strToInt(c[4:6]) + 1), strToInt(c[6:8]), // date
0,0,0, // time
strToInt(c[0:4]), time.Month(strToInt(c[4:6])+1), strToInt(c[6:8]), // date
0, 0, 0, // time
0, time.UTC,
).Unix()
}
@@ -613,7 +613,7 @@ func (this Dav) getResourceURI(path string) (string, error) {
}

var resources []DavResource
var err error
var err error
if resources, err = this.getResources(path); err != nil {
return "", ErrNotFound
}
@@ -627,9 +627,9 @@ func (this Dav) getResourceURI(path string) (string, error) {
}

func (this Dav) parseURL(link string) (string, error) {
var origin *url.URL
var origin *url.URL
var destination *url.URL
var err error
var err error

if destination, _ = url.Parse(link); err != nil {
return "", err
@@ -663,8 +663,8 @@ type DavCollection struct {
Url string `xml:"href"`
Name string `xml:"propstat>prop>displayname,omitempty"`
User string `xml:"propstat>prop>current-user-principal>href,omitempty"`
Type struct {
Inner string `xml:",innerxml"`
Type struct {
Inner string `xml:",innerxml"`
} `xml:"propstat>prop>resourcetype,omitempty"`
}

@@ -14,7 +14,7 @@ import (
)

func init() {
Backend.Register("dropbox", Dropbox{})
Backend.Register("dropbox", Dropbox{})
}

type Dropbox struct {
@@ -45,21 +45,21 @@ func (d Dropbox) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: "dropbox",
Name: "type",
Type: "hidden",
Value: "dropbox",
},
FormElement{
ReadOnly: true,
Name: "oauth2",
Type: "text",
Value: "/api/session/auth/dropbox",
ReadOnly: true,
Name: "oauth2",
Type: "text",
Value: "/api/session/auth/dropbox",
},
FormElement{
ReadOnly: true,
Name: "image",
Type: "image",
Value: "data:image/svg+xml;utf8;base64,PHN2ZyB2aWV3Qm94PSIwIDAgNDIuNCAzOS41IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj4KICA8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjEyLjUsMCAwLDguMSA4LjcsMTUuMSAyMS4yLDcuMyIvPgo8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjAsMjEuOSAxMi41LDMwLjEgMjEuMiwyMi44IDguNywxNS4xIi8+Cjxwb2x5Z29uIGZpbGw9IiMwMDdFRTUiIHBvaW50cz0iMjEuMiwyMi44IDMwLDMwLjEgNDIuNCwyMiAzMy44LDE1LjEiLz4KPHBvbHlnb24gZmlsbD0iIzAwN0VFNSIgcG9pbnRzPSI0Mi40LDguMSAzMCwwIDIxLjIsNy4zIDMzLjgsMTUuMSIvPgo8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjIxLjMsMjQuNCAxMi41LDMxLjcgOC44LDI5LjIgOC44LDMyIDIxLjMsMzkuNSAzMy44LDMyIDMzLjgsMjkuMiAzMCwzMS43Ii8+Cjwvc3ZnPgo=",
ReadOnly: true,
Name: "image",
Type: "image",
Value: "data:image/svg+xml;utf8;base64,PHN2ZyB2aWV3Qm94PSIwIDAgNDIuNCAzOS41IiB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHhtbG5zOnhsaW5rPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5L3hsaW5rIj4KICA8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjEyLjUsMCAwLDguMSA4LjcsMTUuMSAyMS4yLDcuMyIvPgo8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjAsMjEuOSAxMi41LDMwLjEgMjEuMiwyMi44IDguNywxNS4xIi8+Cjxwb2x5Z29uIGZpbGw9IiMwMDdFRTUiIHBvaW50cz0iMjEuMiwyMi44IDMwLDMwLjEgNDIuNCwyMiAzMy44LDE1LjEiLz4KPHBvbHlnb24gZmlsbD0iIzAwN0VFNSIgcG9pbnRzPSI0Mi40LDguMSAzMCwwIDIxLjIsNy4zIDMzLjgsMTUuMSIvPgo8cG9seWdvbiBmaWxsPSIjMDA3RUU1IiBwb2ludHM9IjIxLjMsMjQuNCAxMi41LDMxLjcgOC44LDI5LjIgOC44LDMyIDIxLjMsMzkuNSAzMy44LDMyIDMzLjgsMjkuMiAzMCwzMS43Ii8+Cjwvc3ZnPgo=",
},
},
}

@@ -99,9 +99,9 @@ func (f Ftp) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: "ftp",
Name: "type",
Type: "hidden",
Value: "ftp",
},
FormElement{
Name: "hostname",

File diff suppressed because it is too large

@@ -10,14 +10,14 @@ import (
"os"
"regexp"
"sort"
"strings"
"strconv"
"strings"
"time"
)

type Mysql struct {
params map[string]string
db *sql.DB
db *sql.DB
}

func init() {
@@ -47,7 +47,7 @@ func (this Mysql) Init(params map[string]string, app *App) (IBackend, error) {
}
return Mysql{
params: params,
db: db,
db: db,
}, nil
}

@@ -55,9 +55,9 @@ func (this Mysql) LoginForm() Form {
return Form{
Elmnts: []FormElement{
FormElement{
Name: "type",
Type: "hidden",
Value: "mysql",
Name: "type",
Type: "hidden",
Value: "mysql",
},
FormElement{
Name: "host",
@@ -189,7 +189,7 @@ func (this Mysql) Ls(path string) ([]os.FileInfo, error) {
extractNamePlus := func(s []QuerySelection) []string {
t := make([]string, 0, len(s))
for i := range s {
t = append(t, "IFNULL(" + extractSingleName(s[i]) + ", '')")
t = append(t, "IFNULL("+extractSingleName(s[i])+", '')")
}
return t
}
@@ -203,7 +203,7 @@ func (this Mysql) Ls(path string) ([]os.FileInfo, error) {
}
return q
}(),
func() string{
func() string {
if extractSingleName(sqlFields.Date) != "" {
return ", " + extractSingleName(sqlFields.Date) + " as date "
}
@@ -217,7 +217,7 @@ func (this Mysql) Ls(path string) ([]os.FileInfo, error) {
}
return ""
}(),
));
))
if err != nil {
return nil, err
}
@@ -235,7 +235,7 @@ func (this Mysql) Ls(path string) ([]os.FileInfo, error) {
}
}
files = append(files, File{
FName: string(name_raw)+".form",
FName: string(name_raw) + ".form",
FType: "file",
FSize: -1,
FTime: func() int64 {
@@ -348,9 +348,9 @@ func (this Mysql) Cat(path string) (io.ReadCloser, error) {
case "enum":
el.Type = "select"
reg := regexp.MustCompile(`^'(.*)'$`)
el.Opts = func () []string{
el.Opts = func() []string {
r := strings.Split(strings.TrimSuffix(strings.TrimPrefix(fields.All[columnsName[i]].RawType, "enum("), ")"), ",")
for i:=0; i<len(r); i++ {
for i := 0; i < len(r); i++ {
r[i] = reg.ReplaceAllString(r[i], `$1`)
}
return r
@@ -389,7 +389,7 @@ func (this Mysql) Cat(path string) (io.ReadCloser, error) {
el.MultiValue = false
el.Datalist = choices

if l, err := FindWhoOwns(this.db, DBLocation{ location.db, location.table, columnsName[i]}); err == nil {
if l, err := FindWhoOwns(this.db, DBLocation{location.db, location.table, columnsName[i]}); err == nil {
el.Description = fmt.Sprintf(
"Relates to object in %s",
generateLink(this.params["path"], l, el.Value),
@@ -403,12 +403,12 @@ func (this Mysql) Cat(path string) (io.ReadCloser, error) {

if len(locations) > 0 {
text := []string{}
for i:=0; i<len(locations); i++ {
for i := 0; i < len(locations); i++ {
text = append(
text,
fmt.Sprintf(
"%s (%d)",
generateLink(this.params["path"], DBLocation{ locations[i].db, locations[i].table, locations[i].row }, el.Value),
generateLink(this.params["path"], DBLocation{locations[i].db, locations[i].table, locations[i].row}, el.Value),
FindHowManyOccurenceOfaValue(this.db, locations[i], el.Value),
),
)
@@ -421,7 +421,7 @@ func (this Mysql) Cat(path string) (io.ReadCloser, error) {
}

// STEP 3: Send the form back to the user
b, err := Form{Elmnts: forms}.MarshalJSON();
b, err := Form{Elmnts: forms}.MarshalJSON()
if err != nil {
return nil, err
}
@@ -506,7 +506,7 @@ func (this Mysql) Touch(path string) error {
}
return strings.Join(values, ",")
}(),
func()string {
func() string {
values := make([]string, len(fields.Select))
for i := range values {
values[i] = "?"
@@ -532,7 +532,7 @@ func (this Mysql) Touch(path string) error {
}
return valuesOfQuery
}()
_ ,err = this.db.Exec(query, queryValues...)
_, err = this.db.Exec(query, queryValues...)
return err
}

@@ -540,6 +540,7 @@ type SqlKeyParams struct {
Key string
Value interface{}
}

func (this Mysql) Save(path string, file io.Reader) error {
defer this.db.Close()
location, err := NewDBLocation(path)
@@ -635,7 +636,7 @@ func NewDBLocation(path string) (DBLocation, error) {
return location, nil
} else if lPath == 2 {
location = DBLocation{
db: p[0],
db: p[0],
table: p[1],
}
if isValid(p[0]) == false || isValid(p[1]) == false {
@@ -644,9 +645,9 @@ func NewDBLocation(path string) (DBLocation, error) {
return location, nil
} else if lPath == 3 {
location = DBLocation{
db: p[0],
db: p[0],
table: p[1],
row: strings.TrimSuffix(p[2], ".form"),
row: strings.TrimSuffix(p[2], ".form"),
}
if isValid(p[0]) == false || isValid(p[1]) == false {
return location, ErrNotValid
@@ -691,10 +692,10 @@ func sqlWhereClause(s SqlFields, location DBLocation) (string, []interface{}) {
func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
var queryCandidates []QuerySelection = make([]QuerySelection, 0)
var fields SqlFields = SqlFields{
Order: make([]QuerySelection, 0),
Select: make([]QuerySelection, 0),
Order: make([]QuerySelection, 0),
Select: make([]QuerySelection, 0),
Esthetics: make([]QuerySelection, 0),
All: make(map[string]QuerySelection, 0),
All: make(map[string]QuerySelection, 0),
}
if location.db == "" || location.table == "" {
return fields, ErrNotValid
@@ -736,7 +737,7 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
return false
}(),
RawType: column_type,
Key: column_key,
Key: column_key,
}
fields.All[column_name] = q
queryCandidates = append(queryCandidates, q)
@@ -746,7 +747,7 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
}

// STEP 2: filter out unwanted fields from the schema
for i:=0; i<len(queryCandidates); i++ {
for i := 0; i < len(queryCandidates); i++ {
if queryCandidates[i].Key == "PRI" || queryCandidates[i].Key == "UNI" {
fields.Select = append(fields.Select, queryCandidates[i])
if queryCandidates[i].Type == "date" {
@@ -780,7 +781,7 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
return queryCandidates[i].Size < queryCandidates[j].Size
})
var size int = 0
var i int = 0
var i int = 0
for i = range queryCandidates {
query := fmt.Sprintf(
"SELECT COUNT(%s), COUNT(DISTINCT(%s)) FROM %s.%s",
@@ -790,14 +791,14 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
location.table,
)
size += queryCandidates[i].Size
var count_all int
var count_all int
var count_distinct int
if err := db.QueryRow(query).Scan(&count_all, &count_distinct); err != nil {
return fields, err
}
if count_all == count_distinct {
fields.Select = append(fields.Select, queryCandidates[i])
fields.Esthetics = func() []QuerySelection{
fields.Esthetics = func() []QuerySelection {
var i int
esthetics := make([]QuerySelection, 0, len(fields.Esthetics))
for i = range fields.Esthetics {
@@ -810,7 +811,7 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
break
}
}
if i == len(queryCandidates) - 1 {
if i == len(queryCandidates)-1 {
if size > 200 {
return fields, NewError("This table doesn't have any defined keys.", 405)
}
@@ -852,7 +853,7 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
}
return fields.Order[0].Name
}()
fields.Esthetics = func() []QuerySelection{ // fields whose only value is to make our generated field look good
fields.Esthetics = func() []QuerySelection { // fields whose only value is to make our generated field look good
var size int = 0
var i int
for i = range fields.Select {
@@ -860,12 +861,12 @@ func FindQuerySelection(db *sql.DB, location DBLocation) (SqlFields, error) {
}
for i = range fields.Esthetics {
s := fields.Esthetics[i].Size
if size + s > 100 {
if size+s > 100 {
break
}
size += s
}
if i+1 > len(fields.Esthetics){
if i+1 > len(fields.Esthetics) {
return fields.Esthetics
}
return fields.Esthetics[:i+1]
@@ -920,9 +921,9 @@ func FindWhoIsUsing(db *sql.DB, location DBLocation) ([]DBLocation, error) {
return locations, err
}
locations = append(locations, DBLocation{
db: table_schema,
db: table_schema,
table: table_name,
row: column_name,
row: column_name,
})
}
return locations, nil
@@ -941,7 +942,7 @@ func FindWhoOwns(db *sql.DB, location DBLocation) (DBLocation, error) {
).Scan(&referenced_table_schema, &referenced_table_name, &referenced_column_name); err != nil {
return DBLocation{}, err
}
return DBLocation{ referenced_table_schema, referenced_table_name, referenced_column_name }, nil
return DBLocation{referenced_table_schema, referenced_table_name, referenced_column_name}, nil
}

func FindHowManyOccurenceOfaValue(db *sql.DB, location DBLocation, value interface{}) int {

@@ -22,13 +22,13 @@ import (

var (
SECRET_KEY_DERIVATE_FOR_ONLYOFFICE string
OnlyOfficeCache *cache.Cache
OnlyOfficeCache *cache.Cache
)

type OnlyOfficeCacheData struct {
Path string
Save func(path string, file io.Reader) error
Cat func(path string) (io.ReadCloser, error)
Cat func(path string) (io.ReadCloser, error)
}

func init() {
@@ -69,7 +69,7 @@ func init() {
return
}

SECRET_KEY_DERIVATE_FOR_ONLYOFFICE = Hash("ONLYOFFICE_" + SECRET_KEY, len(SECRET_KEY))
SECRET_KEY_DERIVATE_FOR_ONLYOFFICE = Hash("ONLYOFFICE_"+SECRET_KEY, len(SECRET_KEY))
Hooks.Register.HttpEndpoint(func(r *mux.Router, app *App) error {
oods := r.PathPrefix("/onlyoffice").Subrouter()
oods.PathPrefix("/static/").HandlerFunc(StaticHandler).Methods("GET", "POST")
@@ -77,10 +77,10 @@ func init() {
oods.HandleFunc("/content", FetchContentHandler).Methods("GET")

r.HandleFunc(
COOKIE_PATH + "onlyoffice/iframe",
COOKIE_PATH+"onlyoffice/iframe",
NewMiddlewareChain(
IframeContentHandler,
[]Middleware{ SessionStart, LoggedInOnly },
[]Middleware{SessionStart, LoggedInOnly},
*app,
),
).Methods("GET")
@@ -105,7 +105,7 @@ func StaticHandler(res http.ResponseWriter, req *http.Request) {
SendErrorResult(res, err)
return
}
req.Header.Set("X-Forwarded-Host", req.Host + "/onlyoffice/static")
req.Header.Set("X-Forwarded-Host", req.Host+"/onlyoffice/static")
req.Header.Set("X-Forwarded-Proto", func() string {
if scheme := req.Header.Get("X-Forwarded-Proto"); scheme != "" {
return scheme
@@ -158,17 +158,17 @@ func IframeContentHandler(ctx App, res http.ResponseWriter, req *http.Request) {
}

var (
path string // path of the file we want to open via onlyoffice
filestashServerLocation string // location from which the oods server can reach filestash
userId string // as seen by onlyoffice to distinguish different users
username string // username as displayed by only office
key string // unique identifier for a file as seen be only office
contentType string // name of the application in onlyoffice
filetype string // extension of the document
filename string // filename of the document
oodsMode string // edit mode
oodsDevice string // mobile, desktop of embedded
localip string
path string // path of the file we want to open via onlyoffice
filestashServerLocation string // location from which the oods server can reach filestash
userId string // as seen by onlyoffice to distinguish different users
username string // username as displayed by only office
key string // unique identifier for a file as seen be only office
contentType string // name of the application in onlyoffice
filetype string // extension of the document
filename string // filename of the document
oodsMode string // edit mode
oodsDevice string // mobile, desktop of embedded
localip string
)
query := req.URL.Query()
path, err := ctrl.PathBuilder(ctx, query.Get("path"))
@@ -184,7 +184,7 @@ func IframeContentHandler(ctx App, res http.ResponseWriter, req *http.Request) {
return
}
key = HashStream(f, 20)
key = Hash(key + userId + path, 20)
key = Hash(key+userId+path, 20)

filename = filepath.Base(path)
oodsMode = func() string {
@@ -239,25 +239,34 @@ func IframeContentHandler(ctx App, res http.ResponseWriter, req *http.Request) {
filestashServerLocation = fmt.Sprintf("http://%s:%d", localip, Config.Get("general.port").Int())
contentType = func(p string) string {
var (
word string = "text"
excel string = "spreadsheet"
word string = "text"
excel string = "spreadsheet"
powerpoint string = "presentation"
)
switch GetMimeType(p) {
case "application/word": return word
case "application/msword": return word
case "application/vnd.oasis.opendocument.text": return word
case "application/vnd.oasis.opendocument.spreadsheet": return excel
case "application/excel": return excel
case "application/vnd.ms-excel": return excel
case "application/powerpoint": return powerpoint
case "application/vnd.ms-powerpoint": return powerpoint
case "application/vnd.oasis.opendocument.presentation": return powerpoint
case "application/word":
return word
case "application/msword":
return word
case "application/vnd.oasis.opendocument.text":
return word
case "application/vnd.oasis.opendocument.spreadsheet":
return excel
case "application/excel":
return excel
case "application/vnd.ms-excel":
return excel
case "application/powerpoint":
return powerpoint
case "application/vnd.ms-powerpoint":
return powerpoint
case "application/vnd.oasis.opendocument.presentation":
return powerpoint
}
return ""
}(path)
filetype = strings.TrimPrefix(filepath.Ext(filename), ".")
OnlyOfficeCache.Set(key, &OnlyOfficeCacheData{ path, ctx.Backend.Save, ctx.Backend.Cat }, cache.DefaultExpiration)
OnlyOfficeCache.Set(key, &OnlyOfficeCacheData{path, ctx.Backend.Save, ctx.Backend.Cat}, cache.DefaultExpiration)
res.Write([]byte(fmt.Sprintf(`<!DOCTYPE html>
<html lang="en">
<head>
@@ -351,39 +360,40 @@ func FetchContentHandler(res http.ResponseWriter, req *http.Request) {

type OnlyOfficeEventObject struct {
Actions []struct {
Type int `json: "type"`
UserId string `json: "userid" `
} `json: "actions"`
ChangesURL string `json: "changesurl"`
Forcesavetype int `json: "forcesavetype"`
History struct {
Type int `json: "type"`
UserId string `json: "userid" `
} `json: "actions"`
ChangesURL string `json: "changesurl"`
Forcesavetype int `json: "forcesavetype"`
History struct {
ServerVersion string `json: "serverVersion"`
Changes []struct {
Created string `json: "created"`
User struct {
Id string `json: "id"`
Name string `json: "name"`
Changes []struct {
Created string `json: "created"`
User struct {
Id string `json: "id"`
Name string `json: "name"`
}
} `json: "changes"`
} `json: "history"`
Key string `json: "key"`
Status int `json: "status"`
Url string `json: "url"`
UserData string `json: "userdata"`
Lastsave string `json: "lastsave"`
Users []string `json: "users"`
} `json: "changes"`
} `json: "history"`
Key string `json: "key"`
Status int `json: "status"`
Url string `json: "url"`
UserData string `json: "userdata"`
Lastsave string `json: "lastsave"`
Users []string `json: "users"`
}

func OnlyOfficeEventHandler(res http.ResponseWriter, req *http.Request) {
event := OnlyOfficeEventObject{}
if err := json.NewDecoder(req.Body).Decode(&event); err != nil {
SendErrorResult(res, err)
return
}
return
}
req.Body.Close()

switch event.Status {
case 0: Log.Warning("[onlyoffice] no document with the key identifier could be found. %+v", event)
case 0:
Log.Warning("[onlyoffice] no document with the key identifier could be found. %+v", event)
case 1:
// document is being edited
case 2:
@@ -396,7 +406,7 @@ func OnlyOfficeEventHandler(res http.ResponseWriter, req *http.Request) {
case 5:
Log.Warning("[onlyoffice] undocumented status. %+v", event)
case 6: // document is being edited, but the current document state is saved
saveObject, found := OnlyOfficeCache.Get(event.Key);
saveObject, found := OnlyOfficeCache.Get(event.Key)
if found == false {
res.WriteHeader(http.StatusInternalServerError)
res.Write([]byte(`{"error": 1, "message": "doens't know where to store the given data"}`))
@@ -422,8 +432,10 @@ func OnlyOfficeEventHandler(res http.ResponseWriter, req *http.Request) {
return
}
f.Body.Close()
case 7: Log.Warning("[onlyoffice] error has occurred while force saving the document. %+v", event)
default: Log.Warning("[onlyoffice] undocumented status. %+v", event)
case 7:
Log.Warning("[onlyoffice] error has occurred while force saving the document. %+v", event)
default:
Log.Warning("[onlyoffice] undocumented status. %+v", event)
}
res.Write([]byte(`{"error": 0}`))
}

@@ -6,6 +6,7 @@ package plg_handler_console

import (
"encoding/base64"
"encoding/json"
"github.com/gorilla/mux"
"github.com/gorilla/websocket"
"github.com/kr/pty"
@@ -15,15 +16,14 @@ import (
"net/http"
"os"
"os/exec"
"encoding/json"
"unsafe"
"strings"
"syscall"
"time"
"unsafe"
)

var console_enable = func() bool {
return Config.Get("features.server.console_enable").Schema(func(f *FormElement) *FormElement{
return Config.Get("features.server.console_enable").Schema(func(f *FormElement) *FormElement {
if f == nil {
f = &FormElement{}
}
@@ -145,6 +145,7 @@ var resizeMessage = struct {
X uint16
Y uint16
}{}

func handleSocket(res http.ResponseWriter, req *http.Request) {
conn, err := upgrader.Upgrade(res, req, nil)
if err != nil {
@@ -176,8 +177,8 @@ EOF
cmd = exec.Command("/bin/sh")
cmd.Env = []string{
"TERM=xterm",
"PATH="+os.Getenv("PATH"),
"HOME="+os.Getenv("HOME"),
"PATH=" + os.Getenv("PATH"),
"HOME=" + os.Getenv("HOME"),
}
} else {
res.WriteHeader(http.StatusNotFound)
@@ -252,7 +253,6 @@ EOF
}
}


func htmlIndex(pathPrefix string) []byte {
return []byte(`<!DOCTYPE html>
<html lang="en">
@@ -264,14 +264,14 @@ func htmlIndex(pathPrefix string) []byte {
<meta content="name" name="apple-mobile-web-app-title">
<meta content="black-translucent" name="apple-mobile-web-app-status-bar-style">
<title></title>
<script>`+VendorScript()+`</script>
<style>`+VendorStyle()+`</style>
<style>`+AppStyle()+`</style>
<script>` + VendorScript() + `</script>
<style>` + VendorStyle() + `</style>
<style>` + AppStyle() + `</style>
</head>
<body>
<div id="terminal"></div>
<div id="error-message"></div>
<script>`+AppScript(pathPrefix)+`</script>
<script>` + AppScript(pathPrefix) + `</script>
</body>
</html>`)
}
@@ -310,7 +310,7 @@ func AppScript(pathPrefix string) string {
var websocket = new WebSocket(
(location.protocol === "https:" ? "wss://" : "ws://") +
location.hostname + ((location.port) ? (":" + location.port) : "") +
"`+ pathPrefix +`socket"
"` + pathPrefix + `socket"
);
websocket.binaryType = "arraybuffer";

@@ -18,6 +18,7 @@ import (
)

const SYNCTHING_URI = "/admin/syncthing"

func init() {
plugin_enable := Config.Get("features.syncthing.enable").Schema(func(f *FormElement) *FormElement {
if f == nil {
@@ -54,10 +55,10 @@ func init() {
if plugin_enable == false {
return nil
}
r.HandleFunc(SYNCTHING_URI, func (res http.ResponseWriter, req *http.Request) {
http.Redirect(res, req, SYNCTHING_URI + "/", http.StatusTemporaryRedirect)
r.HandleFunc(SYNCTHING_URI, func(res http.ResponseWriter, req *http.Request) {
http.Redirect(res, req, SYNCTHING_URI+"/", http.StatusTemporaryRedirect)
})
r.Handle(SYNCTHING_URI + "/", AuthBasic(
r.Handle(SYNCTHING_URI+"/", AuthBasic(
func() (string, string) { return "admin", Config.Get("auth.admin").String() },
http.HandlerFunc(SyncthingProxyHandler),
))
@@ -111,7 +112,7 @@ func AuthBasic(credentials func() (string, string), fn http.Handler) http.Handle

func SyncthingProxyHandler(res http.ResponseWriter, req *http.Request) {
req.URL.Path = strings.TrimPrefix(req.URL.Path, SYNCTHING_URI)
req.Header.Set("X-Forwarded-Host", req.Host + SYNCTHING_URI)
req.Header.Set("X-Forwarded-Host", req.Host+SYNCTHING_URI)
req.Header.Set("X-Forwarded-Proto", func() string {
if scheme := req.Header.Get("X-Forwarded-Proto"); scheme != "" {
return scheme

@@ -7,8 +7,8 @@ import (
"net/http"
"os"
"path/filepath"
"strings"
"strconv"
"strings"
)

const ImageCachePath = "data/cache/image/"
@@ -112,7 +112,7 @@ func init() {
os.RemoveAll(cachePath)
os.MkdirAll(cachePath, os.ModePerm)

Hooks.Register.ProcessFileContentBeforeSend(func (reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req *http.Request) (io.ReadCloser, error){
Hooks.Register.ProcessFileContentBeforeSend(func(reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req *http.Request) (io.ReadCloser, error) {
if plugin_enable() == false {
return reader, nil
}
@@ -135,11 +135,11 @@ func init() {
/////////////////////////
// Specify transformation
transform := &Transform{
Input: GetAbsolutePath(ImageCachePath + "imagein_" + QuickString(10)),
Size: thumb_size(),
Crop: true,
Quality: thumb_quality(),
Exif: false,
Input: GetAbsolutePath(ImageCachePath + "imagein_" + QuickString(10)),
Size: thumb_size(),
Crop: true,
Quality: thumb_quality(),
Exif: false,
}
if query.Get("thumbnail") == "true" {
(*res).Header().Set("Cache-Control", fmt.Sprintf("max-age=%d", thumb_caching()))
@@ -172,12 +172,12 @@ func init() {
/////////////////////////
// Transcode RAW image
if IsRaw(mType) {
if ExtractPreview(transform) == nil {
if ExtractPreview(transform) == nil {
mType = "image/jpeg"
(*res).Header().Set("Content-Type", mType)
} else {
return reader, nil
}
}
}

/////////////////////////
@@ -191,9 +191,9 @@ func init() {
}

type Transform struct {
Input string
Size int
Crop bool
Quality int
Exif bool
Input string
Size int
Crop bool
Quality int
Exif bool
}

@@ -47,12 +47,12 @@ func CreateThumbnail(t *Transform) (io.ReadCloser, error) {
}()

select {
case img := <- imageChannel:
case img := <-imageChannel:
if img == nil {
return nil, ErrNotValid
}
return img, nil
case <- ctx.Done():
case <-ctx.Done():
return nil, ErrTimeout
}
}

@@ -1,12 +1,13 @@
package plg_image_light

// #cgo CFLAGS: -I./deps/src
// #include "libtranscode.h"
import "C"

import (
"context"
"golang.org/x/sync/semaphore"
. "github.com/mickael-kerjean/filestash/server/common"
"golang.org/x/sync/semaphore"
"time"
"unsafe"
)
@@ -52,7 +53,7 @@ func IsRaw(mType string) bool {
func ExtractPreview(t *Transform) error {
ctx, cancel := context.WithDeadline(context.Background(), time.Now().Add(TRANSCODE_TIMEOUT))
defer cancel()


if err := LIBRAW_LOCK.Acquire(ctx, 1); err != nil {
return ErrCongestion
}
@@ -69,9 +70,9 @@ func ExtractPreview(t *Transform) error {
}()

select {
case err := <- transcodeChannel:
case err := <-transcodeChannel:
return err
case <- ctx.Done():
return ErrTimeout
case <-ctx.Done():
return ErrTimeout
}
}

@@ -11,12 +11,12 @@ import (
)

var (
gzipBomb *bytes.Buffer
gzipBomb *bytes.Buffer
billionsOfLol *bytes.Buffer
)

func init() {
if plugin_enable := Config.Get("features.protection.enable").Schema(func(f *FormElement) *FormElement{
if plugin_enable := Config.Get("features.protection.enable").Schema(func(f *FormElement) *FormElement {
if f == nil {
f = &FormElement{}
}
@@ -41,7 +41,7 @@ func init() {
}
billionsOfLol = bytes.NewBuffer(b)

Hooks.Register.HttpEndpoint(func(r *mux.Router, _ *App) error{
Hooks.Register.HttpEndpoint(func(r *mux.Router, _ *App) error {
// DEFAULT
r.HandleFunc("/index.php", WelcomePackHandle)
r.PathPrefix("/html/").Handler(http.HandlerFunc(WelcomePackHandle))
@@ -210,7 +210,7 @@ func WelcomePackHandle(res http.ResponseWriter, req *http.Request) {
HandleRedirectOwnIP(res, req)
} else if r < 55 {
HandleRedirectGeo(res, req)
}else if r < 70 {
} else if r < 70 {
HandleXMLBomb(res, req)
} else {
HandleGzipBomb(res, req)

@@ -25,7 +25,7 @@ func init() {
}
disable_svg()

Hooks.Register.ProcessFileContentBeforeSend(func (reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req *http.Request) (io.ReadCloser, error){
Hooks.Register.ProcessFileContentBeforeSend(func(reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req *http.Request) (io.ReadCloser, error) {
if GetMimeType(req.URL.Query().Get("path")) != "image/svg+xml" {
return reader, nil
} else if disable_svg() == true {

@@ -11,7 +11,7 @@ import (
func init() {
port := Config.Get("general.port").Int()

Hooks.Register.Starter(func (r *mux.Router) {
Hooks.Register.Starter(func(r *mux.Router) {
Log.Info("[http] starting ...")
srv := &http.Server{
Addr: fmt.Sprintf(":%d", port),

@@ -24,13 +24,13 @@ func init() {
os.MkdirAll(SSL_PATH, os.ModePerm)
domain := Config.Get("general.host").String()

Hooks.Register.Starter(func (r *mux.Router) {
Hooks.Register.Starter(func(r *mux.Router) {
Log.Info("[https] starting ...%s", domain)
srv := &http.Server{
Addr: fmt.Sprintf(":https"),
Handler: r,
Addr: fmt.Sprintf(":https"),
Handler: r,
TLSConfig: &DefaultTLSConfig,
ErrorLog: NewNilLogger(),
ErrorLog: NewNilLogger(),
}

switch domain {
@@ -39,7 +39,7 @@ func init() {
if err != nil {
return
}
srv.TLSConfig.Certificates = []tls.Certificate{ TLSCert }
srv.TLSConfig.Certificates = []tls.Certificate{TLSCert}
HTTPClient.Transport.(*TransformedTransport).Orig.(*http.Transport).TLSClientConfig = &tls.Config{
RootCAs: roots,
}
@@ -63,7 +63,7 @@ func init() {
}()
go func() {
srv := http.Server{
Addr: fmt.Sprintf(":http"),
Addr: fmt.Sprintf(":http"),
ReadTimeout: 5 * time.Second,
WriteTimeout: 5 * time.Second,
Handler: http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
@@ -71,7 +71,7 @@ func init() {
http.Redirect(
w,
req,
"https://" + req.Host + req.URL.String(),
"https://"+req.Host+req.URL.String(),
http.StatusMovedPermanently,
)
}),
@@ -85,7 +85,6 @@ func init() {
})
}


func ensureAppHasBooted(address string, message string) {
i := 0
for {

@@ -18,14 +18,14 @@ func init() {
os.MkdirAll(SSL_PATH, os.ModePerm)
domain := Config.Get("general.host").String()

Hooks.Register.Starter(func (r *mux.Router) {
Hooks.Register.Starter(func(r *mux.Router) {
Log.Info("[https] starting ...%s", domain)
srv := &http.Server{
Addr: fmt.Sprintf(":https"),
Handler: r,
Addr: fmt.Sprintf(":https"),
Handler: r,
TLSNextProto: make(map[string]func(*http.Server, *tls.Conn, http.Handler), 0),
TLSConfig: &DefaultTLSConfig,
ErrorLog: NewNilLogger(),
TLSConfig: &DefaultTLSConfig,
ErrorLog: NewNilLogger(),
}

switch domain {
@@ -34,7 +34,7 @@ func init() {
if err != nil {
return
}
srv.TLSConfig.Certificates = []tls.Certificate{ TLSCert }
srv.TLSConfig.Certificates = []tls.Certificate{TLSCert}
HTTPClient.Transport.(*TransformedTransport).Orig.(*http.Transport).TLSClientConfig = &tls.Config{
RootCAs: roots,
}
@@ -58,7 +58,7 @@ func init() {
}()
go func() {
srv := http.Server{
Addr: fmt.Sprintf(":http"),
Addr: fmt.Sprintf(":http"),
ReadTimeout: 5 * time.Second,
WriteTimeout: 5 * time.Second,
Handler: http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
@@ -66,7 +66,7 @@ func init() {
http.Redirect(
res,
req,
"https://" + req.Host + req.URL.String(),
"https://"+req.Host+req.URL.String(),
http.StatusMovedPermanently,
)
}),
@@ -80,7 +80,6 @@ func init() {
})
}


func ensureAppHasBooted(address string, message string) {
i := 0
for {

@ -16,7 +16,7 @@ var TOR_PATH string = filepath.Join(GetCurrentDir(), CERT_PATH, "tor")
|
|||
func init() {
|
||||
os.MkdirAll(TOR_PATH, os.ModePerm)
|
||||
enable_tor := func() bool {
|
||||
return Config.Get("features.server.tor_enable").Schema(func(f *FormElement) *FormElement{
|
||||
return Config.Get("features.server.tor_enable").Schema(func(f *FormElement) *FormElement {
|
||||
if f == nil {
|
||||
f = &FormElement{}
|
||||
}
|
||||
|
|
@ -30,7 +30,7 @@ func init() {
|
|||
}).Bool()
|
||||
}
|
||||
enable_tor()
|
||||
Config.Get("features.server.tor_url").Schema(func(f *FormElement) *FormElement{
|
||||
Config.Get("features.server.tor_url").Schema(func(f *FormElement) *FormElement {
|
||||
if f == nil {
|
||||
f = &FormElement{}
|
||||
}
|
||||
|
|
@ -44,15 +44,18 @@ func init() {
|
|||
return f
|
||||
})
|
||||
|
||||
Hooks.Register.Starter(func (r *mux.Router) {
|
||||
Hooks.Register.Starter(func(r *mux.Router) {
|
||||
if enable_tor() == false {
|
||||
startTor := false
|
||||
onChange := Config.ListenForChange()
|
||||
for {
|
||||
select {
|
||||
case <- onChange.Listener: startTor = enable_tor()
|
||||
case <-onChange.Listener:
|
||||
startTor = enable_tor()
|
||||
}
|
||||
if startTor == true {
|
||||
break
|
||||
}
|
||||
if startTor == true { break }
|
||||
}
|
||||
Config.UnlistenForChange(onChange)
|
||||
}
|
||||
|
|
|
|||
|
|
@@ -20,11 +20,11 @@ import (

const (
HLS_SEGMENT_LENGTH = 10
CLEAR_CACHE_AFTER = 12
VideoCachePath = "data/cache/video/"
CLEAR_CACHE_AFTER = 12
VideoCachePath = "data/cache/video/"
)

func init(){
func init() {
ffmpegIsInstalled := false
ffprobeIsInstalled := false
if _, err := exec.LookPath("ffmpeg"); err == nil {
@@ -86,7 +86,7 @@ func init(){
Hooks.Register.HttpEndpoint(func(r *mux.Router, app *App) error {
r.PathPrefix("/hls/hls_{segment}.ts").Handler(NewMiddlewareChain(
hls_transcode,
[]Middleware{ SecureHeaders },
[]Middleware{SecureHeaders},
*app,
)).Methods("GET")
return nil
@@ -99,9 +99,9 @@ func init(){
res.Write([]byte(` return sources.map(function(source){`))

blacklists := strings.Split(blacklist_format(), ",")
for i:=0; i<len(blacklists); i++ {
for i := 0; i < len(blacklists); i++ {
blacklists[i] = strings.TrimSpace(blacklists[i])
res.Write([]byte(fmt.Sprintf(`if(source.type == "%s"){ return source; } `, GetMimeType("." + blacklists[i]))))
res.Write([]byte(fmt.Sprintf(`if(source.type == "%s"){ return source; } `, GetMimeType("."+blacklists[i]))))
}
res.Write([]byte(` source.src = source.src + "&transcode=hls";`))
res.Write([]byte(` source.type = "application/x-mpegURL";`))
@@ -129,7 +129,7 @@ func hls_playlist(reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req
VideoCachePath,
cacheName,
)
f, err := os.OpenFile(cachePath, os.O_CREATE | os.O_RDWR, os.ModePerm)
f, err := os.OpenFile(cachePath, os.O_CREATE|os.O_RDWR, os.ModePerm)
if err != nil {
Log.Stdout("ERR %+v", err)
return reader, err
@@ -137,7 +137,7 @@ func hls_playlist(reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req
io.Copy(f, reader)
reader.Close()
f.Close()
time.AfterFunc(CLEAR_CACHE_AFTER * time.Hour, func() { os.Remove(cachePath) })
time.AfterFunc(CLEAR_CACHE_AFTER*time.Hour, func() { os.Remove(cachePath) })

p, err := ffprobe(cachePath)
if err != nil {
@@ -146,12 +146,12 @@ func hls_playlist(reader io.ReadCloser, ctx *App, res *http.ResponseWriter, req

var response string
var i int
response = "#EXTM3U\n"
response = "#EXTM3U\n"
response += "#EXT-X-VERSION:3\n"
response += "#EXT-X-MEDIA-SEQUENCE:0\n"
response += "#EXT-X-ALLOW-CACHE:YES\n"
response += fmt.Sprintf("#EXT-X-TARGETDURATION:%d\n", HLS_SEGMENT_LENGTH)
for i=0; i< int(p.Format.Duration) / HLS_SEGMENT_LENGTH; i++ {
for i = 0; i < int(p.Format.Duration)/HLS_SEGMENT_LENGTH; i++ {
response += fmt.Sprintf("#EXTINF:%d.0000, nodesc\n", HLS_SEGMENT_LENGTH)
response += fmt.Sprintf("/hls/hls_%d.ts?path=%s\n", i, cacheName)
}
@@ -212,11 +212,11 @@ func hls_transcode(ctx App, res http.ResponseWriter, req *http.Request) {
type FFProbeData struct {
Format struct {
Duration float64 `json:"duration,string"`
BitRate int `json:"bit_rate,string"`
BitRate int `json:"bit_rate,string"`
} `json: "format"`
Streams []struct {
CodecType string `json:"codec_type"`
CodecName string `json:"codec_name"`
CodecType string `json:"codec_type"`
CodecName string `json:"codec_name"`
PixelFormat string `json:"pix_fmt"`
} `json:"streams"`
}
@@ -229,7 +229,7 @@ func ffprobe(videoPath string) (FFProbeData, error) {
"ffprobe", strings.Split(fmt.Sprintf(
"-v quiet -print_format json -show_format -show_streams %s",
videoPath,
), " ")...
), " ")...,
)
cmd.Stdout = &stream
if err := cmd.Run(); err != nil {