more conservative text editor

Pierre Dubouilh 2024-09-28 18:02:16 +02:00
parent 31c8ee518a
commit eaf1342559
3 changed files with 57 additions and 33 deletions

@@ -45,11 +45,6 @@ var verb = flag.Bool("verb", false, "verbosity")
 var skipHidden = flag.Bool("k", true, "\nskip hidden files")
 var ro = flag.Bool("ro", false, "read only mode (no upload, rename, move, etc...)")
-type rpcCall struct {
-	Call string   `json:"call"`
-	Args []string `json:"args"`
-}
 var rootPath = ""
 var handler http.Handler
@@ -134,7 +129,7 @@ func replyList(w http.ResponseWriter, r *http.Request, fullPath string, path str
 	if strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
 		w.Header().Set("Content-Type", "text/html")
 		w.Header().Add("Content-Encoding", "gzip")
-		gz, err := gzip.NewWriterLevel(w, gzip.BestSpeed) // BestSpeed is Much Faster than default - base on a very unscientific local test, and only ~30% larger (compression remains still very effective, ~6x)
+		gz, err := gzip.NewWriterLevel(w, gzip.BestSpeed) // BestSpeed is Much Faster than default - base on a very unscientific local test
 		check(err)
 		defer gz.Close()
 		tmpl.Execute(gz, p)
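
The hunk above only touches the comment on the gzip writer; the listing is still compressed with gzip.BestSpeed whenever the client accepts it. As a standalone, hedged sketch of that pattern (handler, route and body are made up for illustration, not gossa's code):

```go
package main

import (
	"compress/gzip"
	"io"
	"net/http"
	"strings"
)

// serveCompressed writes body through a BestSpeed gzip writer when the client
// advertises gzip support, and plain otherwise.
func serveCompressed(w http.ResponseWriter, r *http.Request, body string) {
	w.Header().Set("Content-Type", "text/html")
	if !strings.Contains(r.Header.Get("Accept-Encoding"), "gzip") {
		io.WriteString(w, body)
		return
	}
	w.Header().Add("Content-Encoding", "gzip")
	gz, err := gzip.NewWriterLevel(w, gzip.BestSpeed) // trade some ratio for speed, as the comment above explains
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer gz.Close()
	io.WriteString(gz, body)
}

func main() {
	http.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) {
		serveCompressed(w, r, "<html><body>hello</body></html>")
	})
	http.ListenAndServe(":8080", nil)
}
```
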
@@ -151,7 +146,8 @@ func doContent(w http.ResponseWriter, r *http.Request) {
 	path := html.UnescapeString(r.URL.Path)
 	defer exitPath(w, "get content", path)
-	fullPath := enforcePath(path)
+	fullPath, err := enforcePath(path)
+	check(err)
 	stat, errStat := os.Stat(fullPath)
 	check(errStat)
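
Several hunks route the new error returns through check, whose definition is not part of this diff. Assuming it follows the usual Go pattern of panicking on a non-nil error and being caught by a deferred recover (the names below are illustrative, not gossa's), the idiom looks like:

```go
package main

import (
	"errors"
	"log"
	"net/http"
)

// check aborts the current handler by panicking on a non-nil error.
func check(err error) {
	if err != nil {
		panic(err)
	}
}

// recoverHandler turns a panic raised by check into a 500 response
// instead of crashing the whole process.
func recoverHandler(next http.HandlerFunc) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if rec := recover(); rec != nil {
				log.Printf("request failed: %v", rec)
				http.Error(w, "error", http.StatusInternalServerError)
			}
		}()
		next(w, r)
	}
}

func main() {
	http.HandleFunc("/fail", recoverHandler(func(w http.ResponseWriter, r *http.Request) {
		check(errors.New("boom")) // unwinds into recoverHandler
	}))
	http.ListenAndServe(":8080", nil)
}
```
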
@@ -174,7 +170,9 @@ func upload(w http.ResponseWriter, r *http.Request) {
 	if err != nil && err != io.EOF { // errs EOF when no more parts to process
 		check(err)
 	}
-	dst, err := os.Create(enforcePath(path))
+	path, err = enforcePath(path)
+	check(err)
+	dst, err := os.Create(path)
 	check(err)
 	io.Copy(dst, part)
 	w.Write([]byte("ok"))
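
The upload hunk now validates the destination with enforcePath before creating the file. A reduced sketch of a multipart upload endpoint in the same style, with an assumed root directory and route standing in for gossa's configuration:

```go
package main

import (
	"io"
	"net/http"
	"os"
	"path/filepath"
)

const uploadRoot = "/tmp/uploads" // illustrative root, not gossa's configuration

// upload stores the first multipart part of the request body, using the
// part's form name as the file name, the same convention the diff relies on.
func upload(w http.ResponseWriter, r *http.Request) {
	mr, err := r.MultipartReader()
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	part, err := mr.NextPart()
	if err != nil { // io.EOF here would mean the body carried no parts at all
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}
	// Keep the destination strictly inside uploadRoot.
	dst, err := os.Create(filepath.Join(uploadRoot, filepath.Base(part.FormName())))
	if err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
		return
	}
	defer dst.Close()
	io.Copy(dst, part)
	w.Write([]byte("ok"))
}

func main() {
	os.MkdirAll(uploadRoot, os.ModePerm)
	http.HandleFunc("/post", upload)
	http.ListenAndServe(":8080", nil)
}
```
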
@@ -184,8 +182,9 @@ func zipRPC(w http.ResponseWriter, r *http.Request) {
 	zipPath := r.URL.Query().Get("zipPath")
 	zipName := r.URL.Query().Get("zipName")
 	defer exitPath(w, "zip", zipPath)
-	zipFullPath := enforcePath(zipPath)
-	_, err := os.Lstat(zipFullPath)
+	zipFullPath, err := enforcePath(zipPath)
+	check(err)
+	_, err = os.Lstat(zipFullPath)
 	check(err)
 	w.Header().Add("Content-Disposition", "attachment; filename=\""+zipName+".zip\"")
 	zipWriter := zip.NewWriter(w)
@@ -203,7 +202,7 @@ func zipRPC(w http.ResponseWriter, r *http.Request) {
 			return nil // hidden files not allowed
 		}
 		if f.Mode()&os.ModeSymlink != 0 {
-			panic(errors.New("symlink not allowed in zip downloads")) // filepath.Walk doesnt support symlinks
+			check(errors.New("symlink not allowed in zip downloads")) // filepath.Walk doesnt support symlinks
 		}
 		header, err := zip.FileInfoHeader(f)
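
The two zip hunks stream an archive straight into the response while walking the tree, skipping hidden entries and rejecting symlinks. A simplified sketch of that streaming pattern, with a hard-coded root and route in place of gossa's request parameters:

```go
package main

import (
	"archive/zip"
	"io"
	"net/http"
	"os"
	"path/filepath"
	"strings"
)

const zipRoot = "/tmp/files" // illustrative root

func zipHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Add("Content-Disposition", `attachment; filename="files.zip"`)
	zw := zip.NewWriter(w)
	defer zw.Close()
	filepath.Walk(zipRoot, func(path string, f os.FileInfo, err error) error {
		if err != nil || f.IsDir() {
			return err
		}
		if strings.Contains(path, "/.") || f.Mode()&os.ModeSymlink != 0 {
			return nil // skip hidden entries and symlinks (the diff rejects symlinks with an error instead)
		}
		header, err := zip.FileInfoHeader(f)
		if err != nil {
			return err
		}
		header.Name, _ = filepath.Rel(zipRoot, path) // keep paths relative inside the archive
		header.Method = zip.Deflate
		dst, err := zw.CreateHeader(header)
		if err != nil {
			return err
		}
		src, err := os.Open(path)
		if err != nil {
			return err
		}
		defer src.Close()
		_, err = io.Copy(dst, src)
		return err
	})
}

func main() {
	http.HandleFunc("/zip", zipHandler)
	http.ListenAndServe(":8080", nil)
}
```
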
@@ -224,39 +223,52 @@ func zipRPC(w http.ResponseWriter, r *http.Request) {
 }
 func rpc(w http.ResponseWriter, r *http.Request) {
-	var err error
+	type rpcCall struct {
+		Call string   `json:"call"`
+		Args []string `json:"args"`
+	}
 	var rpc rpcCall
 	defer exitPath(w, "rpc", rpc)
 	bodyBytes, err := io.ReadAll(r.Body)
 	check(err)
 	json.Unmarshal(bodyBytes, &rpc)
+	path0, err := enforcePath(rpc.Args[0])
+	path1 := ""
+	check(err)
+	if len(rpc.Args) > 1 {
+		path1, err = enforcePath(rpc.Args[1])
+		check(err)
+	}
 	if rpc.Call == "mkdirp" {
-		err = os.MkdirAll(enforcePath(rpc.Args[0]), os.ModePerm)
-	} else if rpc.Call == "mv" {
-		err = os.Rename(enforcePath(rpc.Args[0]), enforcePath(rpc.Args[1]))
+		err = os.MkdirAll(path0, os.ModePerm)
+	} else if rpc.Call == "mv" && len(rpc.Args) == 2 {
+		err = os.Rename(path0, path1)
 	} else if rpc.Call == "rm" {
-		err = os.RemoveAll(enforcePath(rpc.Args[0]))
+		err = os.RemoveAll(path0)
+	} else {
+		err = errors.New("invalid rpc call")
 	}
 	check(err)
 	w.Write([]byte("ok"))
 }
-func enforcePath(p string) string {
+func enforcePath(p string) (string, error) {
 	joined := filepath.Join(rootPath, strings.TrimPrefix(p, *extraPath))
 	fp, err := filepath.Abs(joined)
-	sl, _ := filepath.EvalSymlinks(fp) // err skipped as it would error for unexistent files (RPC check). The actual behaviour is tested below
+	sl, _ := filepath.EvalSymlinks(fp) // err skipped as it would error for inexistent files (RPC check). The actual behaviour is tested below
 	// panic if we had a error getting absolute path,
 	// ... or if path doesnt contain the prefix path we expect,
 	// ... or if we're skipping hidden folders, and one is requested,
 	// ... or if we're skipping symlinks, path exists, and a symlink out of bound requested
 	if err != nil || !strings.HasPrefix(fp, rootPath) || *skipHidden && strings.Contains(p, "/.") || !*symlinks && len(sl) > 0 && !strings.HasPrefix(sl, rootPath) {
-		panic(errors.New("invalid path"))
+		return "", errors.New("invalid path")
 	}
-	return fp
+	return fp, nil
 }
 func main() {
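
The core of the change is enforcePath, which now reports an invalid path as an error instead of panicking: it joins the request onto the server root, resolves it to an absolute path, optionally resolves symlinks, and rejects anything that escapes the root or touches a hidden entry. The same confinement check reduced to a standalone helper (flags removed, root passed as a parameter; not gossa's exact code):

```go
package main

import (
	"errors"
	"fmt"
	"path/filepath"
	"strings"
)

// confine resolves a client-supplied path against root and rejects it if the
// result (or its symlink target) would escape root or touch a hidden entry.
func confine(root, requested string) (string, error) {
	fp, err := filepath.Abs(filepath.Join(root, requested))
	if err != nil {
		return "", err
	}
	// EvalSymlinks errors for paths that don't exist yet (e.g. mkdir targets),
	// so its error is ignored and only an existing, resolved path is checked.
	sl, _ := filepath.EvalSymlinks(fp)
	if !strings.HasPrefix(fp, root) ||
		strings.Contains(requested, "/.") ||
		(len(sl) > 0 && !strings.HasPrefix(sl, root)) {
		return "", errors.New("invalid path")
	}
	return fp, nil
}

func main() {
	fmt.Println(confine("/srv/files", "/docs/readme.txt"))   // ok
	fmt.Println(confine("/srv/files", "/../../etc/passwd"))  // rejected: escapes the root
	fmt.Println(confine("/srv/files", "/.hidden/secret"))    // rejected: hidden entry
}
```
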

@@ -1 +1,2 @@
 B!!!
+test

ui/script.js vendored

@@ -153,8 +153,11 @@ function rpc (call, args, cb) {
   xhr.open('POST', location.origin + window.extraPath + '/rpc')
   xhr.setRequestHeader('Content-Type', 'application/json;charset=UTF-8')
   xhr.send(JSON.stringify({ call, args }))
-  xhr.onload = cb
-  xhr.onerror = () => flicker(sadBadge)
+  xhr.onload = () => cb(false)
+  xhr.onerror = () => {
+    flicker(sadBadge)
+    cb(true)
+  }
 }
 
 const mkdirCall = (path, cb) => rpc('mkdirp', [prependPath(path)], cb)
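
On the wire these helpers just POST a small JSON body such as {"call":"mv","args":[src,dst]} to /rpc and now report success or failure through the callback. For reference, the same call issued from a Go client; the port and paths are assumptions for the example:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

type rpcCall struct {
	Call string   `json:"call"`
	Args []string `json:"args"`
}

func main() {
	body, _ := json.Marshal(rpcCall{Call: "mv", Args: []string{"/notes.txt.swp", "/notes.txt"}})
	resp, err := http.Post("http://localhost:8001/rpc", "application/json;charset=UTF-8", bytes.NewReader(body))
	if err != nil {
		fmt.Println("rpc failed:", err)
		return
	}
	defer resp.Body.Close()
	reply, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(reply)) // expects "ok" on success
}
```
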
@@ -315,28 +318,36 @@ const textTypes = ['.txt', '.rtf', '.md', '.markdown', '.log', '.yaml', '.yml']
 const isTextFile = src => src && textTypes.find(type => src.toLocaleLowerCase().includes(type))
 
 let fileEdited
 
-function saveText (quitting) {
+function saveText (cb) {
   const formData = new FormData()
   formData.append(fileEdited, editor.value)
-  const path = encodeURIComponent(decodeURI(location.pathname) + fileEdited)
+  const fname = fileEdited + ".swp"
+  const path = encodeURIComponent(decodeURI(location.pathname) + fname)
   upload(0, formData, path, () => {
     toast.style.display = 'none'
-    if (!quitting) return
-    clearInterval(window.padTimer)
-    window.onbeforeunload = null
-    resetView()
-    softPrev()
-    refresh()
+    cb()
   }, () => {
     toast.style.display = 'block'
-    if (!quitting) return
     alert('cant save!\r\nleave window open to resume saving\r\nwhen connection back up')
   })
 }
 
 function padOff () {
   if (!isEditorMode()) { return }
-  saveText(true)
+  const swapfile = fileEdited + ".swp"
+  saveText(() => {
+    mvCall(prependPath(swapfile), prependPath(fileEdited), err => {
+      if (err) {
+        alert('cant save!\r\nleave window open to resume saving\r\nwhen connection back up')
+        return
+      }
+      clearInterval(window.padTimer)
+      window.onbeforeunload = null
+      resetView()
+      softPrev()
+      refresh()
+    })
+  })
   return true
 }
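
This is the conservative part of the commit: the editor now uploads to fileEdited + '.swp' and only replaces the real file through the mv rpc once that upload succeeds, so a dropped connection can no longer leave a truncated original behind. The same write-then-rename idea expressed directly in Go (paths are illustrative):

```go
package main

import (
	"log"
	"os"
)

// saveConservatively writes the new content to a swap file next to the target
// and only replaces the target once the write has fully succeeded.
func saveConservatively(target string, content []byte) error {
	swap := target + ".swp"
	if err := os.WriteFile(swap, content, 0o644); err != nil {
		os.Remove(swap) // leave no half-written swap file behind
		return err
	}
	// Rename is atomic on POSIX filesystems, so readers see either the old
	// or the new file, never a partially written one.
	return os.Rename(swap, target)
}

func main() {
	if err := saveConservatively("/tmp/notes.txt", []byte("edited text\n")); err != nil {
		log.Fatal(err)
	}
}
```
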