migration: migrate Node.js codebase to Golang

Mickael Kerjean 2018-07-18 14:20:30 +10:00
parent c5f2839fd7
commit 04c97e34fb
68 changed files with 3837 additions and 2217 deletions

1
.gitignore vendored

@ -1,5 +1,4 @@
node_modules/
babel_cache/
dist/
.DS_Store
package-lock.json


@ -40,19 +40,17 @@
# Getting started - Installation
Nuage can be used in different settings:
- Selfhosting ([documentation](https://github.com/mickael-kerjean/nuage/wiki/Installation:-Selfhosting)): install it somewhere you have full control (with or without Docker, on a server or even on Android)
- PaaS ([documentation](https://github.com/mickael-kerjean/nuage/wiki/Installation:-PaaS)): deployment on Heroku or AWS Lambda
- SaaS ([documentation](https://github.com/mickael-kerjean/nuage/wiki/Installation:-SaaS)): official instance or a fully managed private instance
# Support the project
- Bitcoin: `3LX5KGmSmHDj5EuXrmUvcg77EJxCxmdsgW`
- [Patreon](https://www.patreon.com/mickaelk)
# Documentation
- [FAQ](https://github.com/mickael-kerjean/nuage/wiki)
- [Customisation](https://github.com/mickael-kerjean/nuage/wiki/Customisation)
- [Release Notes](https://github.com/mickael-kerjean/nuage/wiki/Releases)
# Credits
- [Contributors](https://github.com/mickael-kerjean/nuage/graphs/contributors) and folks developing awesome [libraries](https://github.com/mickael-kerjean/nuage/blob/master/package.json)
- [Contributors](https://github.com/mickael-kerjean/nuage/graphs/contributors) and folks developing awesome libraries (libvips, libraw, ...)
- Logo by [ssnjrthegr8](https://github.com/ssnjrthegr8) and Iconography from [flaticon](https://www.flaticon.com/), [fontawesome](https://fontawesome.com) and [material](https://material.io/icons/)

Binary file not shown.


@ -49,7 +49,7 @@ Data.prototype.get = function(type, path){
};
query.onerror = error;
});
});
}).catch(() => Promise.resolve(null))
}
Data.prototype.update = function(type, path, fn, exact = true){
@ -73,7 +73,7 @@ Data.prototype.update = function(type, path, fn, exact = true){
cursor.continue();
};
});
});
}).catch(() => Promise.resolve(null))
}
@ -93,7 +93,7 @@ Data.prototype.upsert = function(type, path, fn){
};
query.onerror = error;
});
});
}).catch(() => Promise.resolve(null))
}
Data.prototype.add = function(type, path, data){
@ -107,7 +107,7 @@ Data.prototype.add = function(type, path, data){
request.onsuccess = () => done(data);
request.onerror = (e) => error(e);
});
});
}).catch(() => Promise.resolve(null))
}
Data.prototype.remove = function(type, path, exact = true){
@ -139,7 +139,7 @@ Data.prototype.remove = function(type, path, exact = true){
};
});
}
});
}).catch(() => Promise.resolve(null))
}
Data.prototype.fetchAll = function(fn, type = this.FILE_PATH){
@ -156,11 +156,13 @@ Data.prototype.fetchAll = function(fn, type = this.FILE_PATH){
cursor.continue();
};
});
});
}).catch(() => Promise.resolve(null))
}
Data.prototype.destroy = function(){
this.db.then((db) => db.close())
this.db
.then((db) => db.close())
.catch(() => {})
clearTimeout(this.intervalId);
window.indexedDB.deleteDatabase('nuage');
this._init();
@ -168,4 +170,3 @@ Data.prototype.destroy = function(){
export const cache = new Data();
window._cache = cache;


@ -1,9 +1,8 @@
import Path from 'path';
import db from '../../server/common/mimetype.json';
export function getMimeType(file){
let ext = Path.extname(file).replace(/^\./, '').toLowerCase();
let mime = db[ext];
let mime = CONFIG.mime[ext];
if(mime){
return mime;
}else{


@ -24,14 +24,8 @@
<link rel="icon" type="image/png" sizes="96x96" href="/assets/logo/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/assets/logo/favicon-16x16.png">
<link rel="icon" href="/assets/logo/favicon.ico" type="image/x-icon" />
<link href="https://fonts.googleapis.com/css?family=Inconsolata" rel="stylesheet">
<script src="/api/config"></script>
<!--[if IE]>
<script src="https://cdn.jsdelivr.net/npm/es6-promise@4/dist/es6-promise.auto.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/babel-polyfill/6.26.0/polyfill.min.js"></script>
<![endif]-->
<meta name="msapplication-TileColor" content="#f2f2f2">
<meta name="msapplication-TileImage" content="/assets/logo/ms-icon-144x144.png">
<meta name="theme-color" content="#f2f2f2">


@ -389,6 +389,9 @@ const S3Form = formHelper(function(props){
<NgIf cond={props.should_appear("path")}>
<Input value={props.values["path"] || ""} onChange={(e) => props.onChange("path", e.target.value)} type={props.input_type("path")} name="path" placeholder="Path" autoComplete="new-password" />
</NgIf>
<NgIf cond={props.should_appear("region")}>
<Input value={props.values["region"] || ""} onChange={(e) => props.onChange("region", e.target.value)} type={props.input_type("region")} name="region" placeholder="Region" autoComplete="new-password" />
</NgIf>
<NgIf cond={props.should_appear("endpoint")}>
<Input value={props.values["endpoint"] || ""} onChange={(e) => props.onChange("endpoint", e.target.value)} type={props.input_type("endpoint")} name="endpoint" placeholder="Endpoint" autoComplete="new-password" />
</NgIf>


@ -123,7 +123,7 @@ export class ExistingThing extends React.Component {
const type = getMimeType(_path).split("/")[0];
if(type === "image"){
Files.url(_path).then((url) => {
this.setState({preview: url+"&size=250"});
this.setState({preview: url+"&thumbnail=true"});
});
}
}


@ -262,7 +262,7 @@ class Img extends React.Component{
render(){
const image_url = (url, size) => {
return url+"&meta=true&size="+parseInt(window.innerWidth*size);
return url+"&size="+parseInt(window.innerWidth*size);
};
if(!this.props.src) return null;


@ -1,4 +1,4 @@
const CACHE_NAME = 'v1.0';
const CACHE_NAME = 'v0.3';
const DELAY_BEFORE_SENDING_CACHE = 2000;
/*

56
config/config.json Normal file

@ -0,0 +1,56 @@
{
"general": {
"port": 8334,
"host": "http://127.0.0.1:8334",
"secret_key": "example key 1234",
"editor": "emacs",
"fork_button": true,
"display_hidden": false,
"client_search_enable": true,
"client_search_per_min": 20
},
"log": {
"enable": true,
"level": "INFO",
"telemetry": true
},
"oauth": {
"gdrive": {
"client_id": "",
"client_secret": ""
},
"dropbox": {
"client_id": ""
}
},
"connections": [
{
"type": "webdav",
"label": "WebDav"
},
{
"type": "ftp",
"label": "FTP"
},
{
"type": "sftp",
"label": "SFTP"
},
{
"type": "git",
"label": "GIT"
},
{
"type": "s3",
"label": "S3"
},
{
"type": "dropbox",
"label": "Dropbox"
},
{
"type": "gdrive",
"label": "Drive"
}
]
}
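
The new Go server reads this file at startup (see server/common/config.go further down in this commit). As a point of reference, here is a minimal, hypothetical sketch of decoding it with the standard library; miniConfig is illustrative and only mirrors a subset of the fields, it is not part of this commit:

package main

import (
    "encoding/json"
    "fmt"
    "os"
)

// miniConfig mirrors only a subset of the fields in config/config.json
type miniConfig struct {
    General struct {
        Port int    `json:"port"`
        Host string `json:"host"`
    } `json:"general"`
    Connections []struct {
        Type  string `json:"type"`
        Label string `json:"label"`
    } `json:"connections"`
}

func main() {
    f, err := os.Open("config/config.json")
    if err != nil {
        panic(err)
    }
    defer f.Close()
    var c miniConfig
    if err := json.NewDecoder(f).Decode(&c); err != nil {
        panic(err)
    }
    fmt.Println(c.General.Port, len(c.Connections)) // 8334 7
}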


@ -1,35 +1,4 @@
const path = require('path');
module.exports.getMimeType = function(file){
let ext = path.extname(file).replace(/^\./, '').toLowerCase();
let mime = db[ext];
if(mime){
return mime;
}else{
return 'text/plain';
}
}
module.exports.opener = function(file){
let mime = getMimeType(file);
if(mime.split('/')[0] === 'text'){
return 'editor';
}else if(mime === 'application/pdf'){
return 'pdf';
}else if(mime.split('/')[0] === 'image'){
return 'image';
}else if(['application/javascript', 'application/xml', 'application/x-perl'].indexOf(mime) !== -1){
return 'editor';
}else if(['audio/wav', 'audio/mp3', 'audio/flac'].indexOf(mime) !== -1){
return 'audio';
}else if(['video/webm', 'video/mp4', 'application/ogg'].indexOf(mime) !== -1){
return 'video';
}else{
return 'download';
}
}
const db = {
{
"html": "text/html",
"shtml": "text/html",
"htm": "text/html",
@ -45,7 +14,6 @@ const db = {
"jpeg": "image/jpeg",
"svg": "image/svg",
"png": "image/png",
"svg": "image/svg+xml",
"svgz": "image/svg+xml",
"webp": "image/webp",
"gif": "image/gif",
@ -228,9 +196,7 @@ const db = {
"msi": "application/octet-stream",
"msm": "application/octet-stream",
"msp": "application/octet-stream",
"docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
"xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
"pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation"
};
module.exports.mime = db;
"docx": "application/word",
"xlsx": "application/excel",
"pptx": "application/powerpoint"
}


@ -1,21 +1,56 @@
FROM alpine:latest
MAINTAINER mickael.kerjean@gmail.com
RUN apk add --no-cache git && \
# INSTALL SYSTEM DEPS
git clone https://github.com/mickael-kerjean/nuage /app && \
cd /app && \
apk add --no-cache nodejs libcurl && \
# Nodegit
apk --no-cache add --virtual .build-deps g++ libressl-dev make python curl-dev && \
BUILD_ONLY=true npm install nodegit > /dev/null && \
apk del .build-deps && \
RUN mkdir -p /tmp/go/src/github.com/mickael-kerjean/ && \
#################
# Dependencies
apk --no-cache --virtual .build-deps add make gcc g++ curl nodejs git go && \
mkdir /tmp/deps && \
# libvips #######
cd /tmp/deps && \
curl -L -X GET https://github.com/jcupitt/libvips/releases/download/v8.6.5/vips-8.6.5.tar.gz > libvips.tar.gz && \
tar -zxf libvips.tar.gz && \
cd vips-8.6.5/ && \
apk --no-cache add libexif-dev tiff-dev jpeg-dev libjpeg-turbo-dev libpng-dev librsvg-dev giflib-dev glib-dev fftw-dev glib-dev libc-dev expat-dev orc-dev && \
./configure && \
make -j 6 && \
make install && \
# libraw ########
cd /tmp/deps && \
curl -X GET https://www.libraw.org/data/LibRaw-0.19.0.tar.gz > libraw.tar.gz && \
tar -zxf libraw.tar.gz && \
cd LibRaw-0.19.0/ && \
./configure && \
make -j 6 && \
make install && \
#################
# Prepare Build
cd /tmp/go/src/github.com/mickael-kerjean && \
apk add --no-cache --virtual .build-deps git go nodejs && \
git clone --depth 1 https://github.com/mickael-kerjean/nuage && \
cd nuage && \
mkdir -p ./dist/data/ && \
mv config ./dist/data/ && \
#################
# Compile Frontend
npm install && \
# PRODUCTION BUILD
npm rebuild node-sass && \
NODE_ENV=production npm run build && \
npm prune --production
#################
# Compile Backend
cd /tmp/go/src/github.com/mickael-kerjean/nuage/server && \
CGO_CFLAGS_ALLOW='-fopenmp' GOPATH=/tmp/go go get && \
cd ../ && \
GOPATH=/tmp/go go build -o ./dist/nuage ./server/main.go && \
#################
# Finalise the build
apk --no-cache add ca-certificates && \
mv dist /app && \
cd /app && \
rm -rf /tmp/* && \
apk del .build-deps
EXPOSE 8334
VOLUME ["/app/data/config/"]
WORKDIR "/app"
ENV NODE_ENV production
CMD ["node", "/app/server/index"]
CMD ["/app/nuage"]


@ -14,33 +14,7 @@
},
"author": "",
"license": "ISC",
"dependencies": {
"aws-sdk": "^2.59.0",
"body-parser": "^1.17.2",
"cookie-parser": "^1.4.3",
"cors": "^2.8.3",
"crypto": "0.0.3",
"express": "^4.15.3",
"express-winston": "^2.4.0",
"ftp": "^0.3.10",
"google-auth-library": "^0.10.0",
"googleapis": "^19.0.0",
"multiparty": "^4.1.3",
"node-ssh": "^4.2.2",
"nodegit": "^0.22.0",
"path": "^0.12.7",
"react-sticky": "^6.0.2",
"request": "^2.81.0",
"request-promise": "^4.2.1",
"scp2": "^0.5.0",
"ssh2-sftp-client": "^1.1.0",
"stream-to-string": "^1.1.0",
"string-to-stream": "^1.1.0",
"uglifyjs-webpack-plugin": "^1.2.5",
"webdav-fs": "^1.10.1",
"winston": "^2.3.1",
"winston-couchdb": "^0.6.3"
},
"dependencies": {},
"devDependencies": {
"assert": "^1.4.1",
"babel-cli": "^6.11.4",
@ -88,12 +62,14 @@
"react-infinite-scroller": "^1.1.4",
"react-router": "^4.1.1",
"react-router-dom": "^4.1.1",
"react-sticky": "^6.0.2",
"requirejs": "^2.3.5",
"rx-lite": "^4.0.8",
"rxjs": "^5.4.0",
"sass-loader": "^6.0.6",
"sass-variable-loader": "^0.1.2",
"style-loader": "^0.20.2",
"uglifyjs-webpack-plugin": "^1.2.5",
"url-loader": "^0.6.2",
"video.js": "^5.19.2",
"videojs-contrib-hls": "^5.14.1",

59
server/bootstrap.js vendored

@ -1,59 +0,0 @@
var bodyParser = require('body-parser'),
cookieParser = require('cookie-parser'),
cors = require('cors'),
config = require('../config_server'),
express = require('express'),
winston = require('winston'),
expressWinston = require('express-winston');
require('winston-couchdb');
var app = express();
app.enable('trust proxy')
app.disable('x-powered-by');
app.use(cookieParser());
app.use(bodyParser.json());
if(process.env.NODE_ENV === 'production'){
var transports = [
new winston.transports.Console({
json: false,
colorize: false
})
];
if(config.info.usage_stats === true){
transports.push(new winston.transports.Couchdb({
host: 'log.kerjean.me',
db: 'log_nuage',
port: 443,
ssl: true,
}));
}
app.use(expressWinston.logger({
transports: transports,
requestWhitelist: [],
responseWhitelist: [],
meta: true,
exitOnError: false,
msg: "HTTP {{res.statusCode}} {{req.method}} {{req.url}} {{res.responseTime}}ms",
expressFormat: true,
colorize: false,
ignoreRoute: function (req, res) {
return /^\/api\//.test(req.originalUrl)? false : true;
},
dynamicMeta: function(req, res) {
return {
host: req.hostname,
protocol: req.protocol,
method:req.method,
pathname: req.originalUrl,
ip: req.ip,
referrer: req.get('Referrer'),
status: res.statusCode,
}
}
}));
}
module.exports = app;

39
server/common/app.go Normal file

@ -0,0 +1,39 @@
package common
import (
"net"
"net/http"
"os"
"path/filepath"
"time"
)
type App struct {
Config *Config
Helpers *Helpers
Backend IBackend
Body map[string]string
Session map[string]string
}
func GetCurrentDir() string {
ex, _ := os.Executable()
return filepath.Dir(ex)
}
var HTTPClient = http.Client{
Timeout: 5 * time.Hour,
Transport: &http.Transport{
Dial: (&net.Dialer{
Timeout: 10 * time.Second,
KeepAlive: 10 * time.Second,
}).Dial,
TLSHandshakeTimeout: 5 * time.Second,
IdleConnTimeout: 60 * time.Second,
ResponseHeaderTimeout: 60 * time.Second,
},
}
var HTTP = http.Client{
Timeout: 800 * time.Millisecond,
}

50
server/common/cache.go Normal file

@ -0,0 +1,50 @@
package common
import (
"fmt"
"github.com/mitchellh/hashstructure"
"github.com/patrickmn/go-cache"
"time"
)
type AppCache struct {
Cache *cache.Cache
}
func (a *AppCache) Get(key interface{}) interface{} {
hash, err := hashstructure.Hash(key, nil)
if err != nil {
return nil
}
value, found := a.Cache.Get(fmt.Sprint(hash))
if found == false {
return nil
}
return value
}
func (a *AppCache) Set(key map[string]string, value interface{}) {
hash, err := hashstructure.Hash(key, nil)
if err != nil {
return
}
a.Cache.Set(fmt.Sprint(hash), value, cache.DefaultExpiration)
}
func (a *AppCache) OnEvict(fn func(string, interface{})) {
a.Cache.OnEvicted(fn)
}
func NewAppCache(arg ...time.Duration) AppCache {
var retention time.Duration = 5
var cleanup time.Duration = 10
if len(arg) > 0 {
retention = arg[0]
if len(arg) > 1 {
cleanup = arg[1]
}
}
c := AppCache{}
c.Cache = cache.New(retention*time.Minute, cleanup*time.Minute)
return c
}
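
A hypothetical usage sketch (not part of this commit): because keys are hashed with hashstructure, the raw map of connection parameters can be used as the cache key directly, which is how the FTP backend further down reuses its connections:

package main

import (
    "fmt"

    . "github.com/mickael-kerjean/nuage/server/common"
)

func main() {
    // values are minutes: keep entries for 2 minutes, sweep every minute
    cache := NewAppCache(2, 1)

    params := map[string]string{"hostname": "ftp.example.com", "username": "demo"}
    cache.Set(params, "an expensive connection handle")

    // an equal map hashes to the same key, so the entry is found again
    if v := cache.Get(params); v != nil {
        fmt.Println(v) // an expensive connection handle
    }
}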

188
server/common/config.go Normal file

@ -0,0 +1,188 @@
package common
import (
"encoding/json"
"github.com/fsnotify/fsnotify"
"log"
"os"
"path/filepath"
)
const (
CONFIG_PATH = "data/config/"
APP_VERSION = "v0.3"
)
func NewConfig() *Config {
c := Config{}
c.Initialise()
return &c
}
type Config struct {
General struct {
Port int `json:"port"`
Host string `json:"host"`
SecretKey string `json:"secret_key"`
Editor string `json:"editor"`
ForkButton bool `json:"fork_button"`
DisplayHidden bool `json:"display_hidden"`
} `json:"general"`
Log struct {
Enable bool `json:"enable"`
Level string `json:"level"`
Telemetry bool `json:"telemetry"`
} `json:"log"`
OAuthProvider struct {
Dropbox struct {
ClientID string `json:"client_id"`
} `json:"dropbox"`
GoogleDrive struct {
ClientID string `json:"client_id"`
ClientSecret string `json:"client_secret"`
} `json:"gdrive"`
} `json:"oauth"`
Connections []struct {
Type string `json:"type"`
Label string `json:"label"`
Hostname *string `json:"hostname,omitempty"`
Username *string `json:"username,omitempty"`
Password *string `json:"password,omitempty"`
Url *string `json:"url,omitempty"`
Advanced *bool `json:"advanced,omitempty"`
Port *uint `json:"port,omitempty"`
Path *string `json:"path,omitempty"`
Passphrase *string `json:"passphrase,omitempty"`
SecretAccessKey *string `json:"secret_access_key,omitempty"`
AccessKeyId *string `json:"access_key_id,omitempty"`
Endpoint *string `json:"endpoint,omitempty"`
Commit *string `json:"commit,omitempty"`
Branch *string `json:"branch,omitempty"`
AuthorEmail *string `json:"author_email,omitempty"`
AuthorName *string `json:"author_name,omitempty"`
CommitterEmail *string `json:"committer_email,omitempty"`
CommitterName *string `json:"committer_name,omitempty"`
} `json:"connections"`
Runtime struct {
Dirname string
ConfigPath string
FirstSetup bool
} `json:"-"`
MimeTypes map[string]string `json:"mimetypes"`
}
func (c *Config) Initialise() {
c.Runtime.Dirname = GetCurrentDir()
c.Runtime.ConfigPath = filepath.Join(c.Runtime.Dirname, CONFIG_PATH)
os.MkdirAll(c.Runtime.ConfigPath, os.ModePerm)
if err := c.loadConfig(filepath.Join(c.Runtime.ConfigPath, "config.json")); err != nil {
log.Println("> Can't load configuration file")
}
if err := c.loadMimeType(filepath.Join(c.Runtime.ConfigPath, "mime.json")); err != nil {
log.Println("> Can't load mimetype config")
}
go c.ChangeListener()
}
func (c *Config) loadConfig(path string) error {
file, err := os.Open(path)
defer file.Close()
if err != nil {
c = &Config{}
log.Println("can't load config file")
return err
}
decoder := json.NewDecoder(file)
err = decoder.Decode(&c)
if err != nil {
return err
}
c.populateDefault(path)
return nil
}
func (c *Config) ChangeListener() {
watcher, err := fsnotify.NewWatcher()
if err != nil {
log.Fatal(err)
}
defer watcher.Close()
done := make(chan bool)
go func() {
for {
select {
case event := <-watcher.Events:
if event.Op&fsnotify.Write == fsnotify.Write {
config_path := filepath.Join(c.Runtime.ConfigPath, "config.json")
if err = c.loadConfig(config_path); err != nil {
log.Println("can't load config file")
} else {
c.populateDefault(config_path)
}
}
}
}
}()
_ = watcher.Add(c.Runtime.ConfigPath)
<-done
}
func (c *Config) populateDefault(path string) {
if c.General.Port == 0 {
c.General.Port = 8334
}
if c.General.SecretKey == "" {
c.General.SecretKey = RandomString(16)
j, err := json.Marshal(c)
if err == nil {
f, err := os.OpenFile(path, os.O_WRONLY, os.ModePerm)
if err == nil {
f.Write(j)
f.Close()
}
}
}
if c.OAuthProvider.Dropbox.ClientID == "" {
c.OAuthProvider.Dropbox.ClientID = os.Getenv("DROPBOX_CLIENT_ID")
}
if c.OAuthProvider.GoogleDrive.ClientID == "" {
c.OAuthProvider.GoogleDrive.ClientID = os.Getenv("GDRIVE_CLIENT_ID")
}
if c.OAuthProvider.GoogleDrive.ClientSecret == "" {
c.OAuthProvider.GoogleDrive.ClientSecret = os.Getenv("GDRIVE_CLIENT_SECRET")
}
if c.General.Host == "" {
c.General.Host = os.Getenv("APPLICATION_URL")
}
}
func (c *Config) Export() (string, error) {
publicConf := struct {
Editor string `json:"editor"`
ForkButton bool `json:"fork_button"`
DisplayHidden bool `json:"display_hidden"`
Connections interface{} `json:"connections"`
MimeTypes map[string]string `json:"mime"`
}{
Editor: c.General.Editor,
ForkButton: c.General.ForkButton,
DisplayHidden: c.General.DisplayHidden,
Connections: c.Connections,
MimeTypes: c.MimeTypes,
}
j, err := json.Marshal(publicConf)
if err != nil {
return "", err
}
return string(j), nil
}
func (c *Config) loadMimeType(path string) error {
file, err := os.Open(path)
defer file.Close()
if err != nil {
return err
}
decoder := json.NewDecoder(file)
return decoder.Decode(&c.MimeTypes)
}
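
Export() produces the public subset of the configuration. Below is a speculative sketch of how it could back the /api/config script tag added to index.html above; the real handler lives in the router package, which is not shown in this diff, and the CONFIG global is an assumption based on the client-side CONFIG.mime lookup:

package main

import (
    "fmt"
    "net/http"

    . "github.com/mickael-kerjean/nuage/server/common"
)

func main() {
    config := NewConfig()
    http.HandleFunc("/api/config", func(w http.ResponseWriter, r *http.Request) {
        body, err := config.Export()
        if err != nil {
            http.Error(w, "config export failed", http.StatusInternalServerError)
            return
        }
        // index.html loads this endpoint as a <script>, so expose the
        // public settings as a global the frontend can read (CONFIG.mime, ...)
        w.Header().Set("Content-Type", "application/javascript")
        fmt.Fprintf(w, "var CONFIG = %s;", body)
    })
    http.ListenAndServe(":8334", nil)
}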

109
server/common/error.go Normal file

@ -0,0 +1,109 @@
package common
import (
"fmt"
)
func NewError(message string, status int) error {
return AppError{message, status}
}
type AppError struct {
message string
status int
}
func (e AppError) Error() string {
return fmt.Sprintf("%s", e.message)
}
func (e AppError) Status() int {
return e.status
}
func HTTPFriendlyStatus(n int) string {
if n < 400 || n > 600 {
return "Humm"
}
switch n {
case 400:
return "Bad Request"
case 401:
return "Unauthorized"
case 402:
return "Payment Required"
case 403:
return "Forbidden"
case 404:
return "Not Found"
case 405:
return "Not Allowed"
case 406:
return "Not Acceptable"
case 407:
return "Authentication Required"
case 408:
return "Timeout"
case 409:
return "Conflict"
case 410:
return "Gone"
case 411:
return "Length Required"
case 412:
return "Failed"
case 413:
return "Too Large"
case 414:
return "URI Too Long"
case 415:
return "Unsupported Media"
case 416:
return "Not Like This"
case 417:
return "Unexpected"
case 418:
return "I'm a teapot"
case 421:
return "Redirection Problem"
case 422:
return "Unprocessable"
case 423:
return "Locked"
case 424:
return "Failed Dependency"
case 426:
return "Upgrade Required"
case 428:
return "Need Something"
case 429:
return "Too Many Requests"
case 431:
return "Request Too Large"
case 451:
return "Not Available"
case 500:
return "Internal Server Error"
case 501:
return "Not Implemented"
case 502:
return "Bad Gateway"
case 503:
return "Service Unavailable"
case 504:
return "Gateway Timeout"
case 505:
return "Unsupported HTTP Version"
case 506:
return "Need To Negotiate"
case 507:
return "Insufficient Storage"
case 508:
return "Loop Detected"
case 510:
return "Not Extended"
case 511:
return "Authentication Required"
default:
return "Oops"
}
}
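
A short sketch, not part of the commit, of how NewError and HTTPFriendlyStatus are meant to work together: backends attach an HTTP status to their errors, and callers can unwrap it to build a user-facing message (the describe helper is hypothetical):

package main

import (
    "fmt"

    . "github.com/mickael-kerjean/nuage/server/common"
)

// describe recovers the status carried by an AppError, defaulting to 500
func describe(err error) string {
    status := 500
    if appErr, ok := err.(AppError); ok {
        status = appErr.Status()
    }
    return fmt.Sprintf("%d %s: %s", status, HTTPFriendlyStatus(status), err.Error())
}

func main() {
    err := NewError("can't get things in photos", 404)
    fmt.Println(describe(err)) // 404 Not Found: can't get things in photos
}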

8
server/common/files.go Normal file

@ -0,0 +1,8 @@
package common
func IsDirectory(path string) bool {
// guard against an empty path, which would otherwise panic on path[len(path)-1]
if len(path) == 0 {
return false
}
return path[len(path)-1] == '/'
}

39
server/common/helpers.go Normal file

@ -0,0 +1,39 @@
package common
import (
"path/filepath"
"strings"
)
type Helpers struct {
AbsolutePath func(p string) string
MimeType func(p string) string
}
func NewHelpers(config *Config) *Helpers {
return &Helpers{
MimeType: mimeType(config),
AbsolutePath: absolutePath(config),
}
}
func absolutePath(c *Config) func(p string) string {
return func(p string) string {
return filepath.Join(c.Runtime.Dirname, p)
}
}
func mimeType(c *Config) func(p string) string {
return func(p string) string {
ext := filepath.Ext(p)
if ext != "" {
ext = ext[1:]
}
ext = strings.ToLower(ext)
mType := c.MimeTypes[ext]
if mType == "" {
return "application/octet-stream"
}
return mType
}
}
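
A hypothetical usage sketch: the helpers close over the loaded *Config, so one *Helpers value can serve every request. The exact results depend on the mime.json table having been loaded into Config.MimeTypes:

package main

import (
    "fmt"

    . "github.com/mickael-kerjean/nuage/server/common"
)

func main() {
    config := NewConfig()
    helpers := NewHelpers(config)

    // extension lookup is lowercased first; unknown extensions fall back
    // to application/octet-stream
    fmt.Println(helpers.MimeType("holidays/IMG_0001.JPG"))
    fmt.Println(helpers.MimeType("notes.unknownext")) // application/octet-stream

    // paths are resolved relative to the directory of the executable
    fmt.Println(helpers.AbsolutePath("data/config/config.json"))
}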

29
server/common/log.go Normal file

@ -0,0 +1,29 @@
package common
import (
"fmt"
"io"
"io/ioutil"
"time"
)
type LogEntry struct {
Host string `json:"host"`
Method string `json:"method"`
RequestURI string `json:"pathname"`
Proto string `json:"proto"`
Status int `json:"status"`
Scheme string `json:"scheme"`
UserAgent string `json:"userAgent"`
Ip string `json:"ip"`
Referer string `json:"referer"`
Timestamp time.Time `json:"_id"`
Duration int64 `json:"responseTime"`
Version string `json:"version"`
Backend string `json:"backend"`
}
func Debug_reader(r io.Reader) {
a, _ := ioutil.ReadAll(r)
fmt.Println("> DEBUG:", string(a))
}

60
server/common/types.go Normal file

@ -0,0 +1,60 @@
package common
import (
"io"
"os"
"time"
)
type IBackend interface {
Ls(path string) ([]os.FileInfo, error)
Cat(path string) (io.Reader, error)
Mkdir(path string) error
Rm(path string) error
Mv(from string, to string) error
Save(path string, file io.Reader) error
Touch(path string) error
Info() string
}
type File struct {
FName string `json:"name"`
FType string `json:"type"`
FTime int64 `json:"time"`
FSize int64 `json:"size"`
CanRename *bool `json:"can_rename,omitempty"`
CanMove *bool `json:"can_move_directory,omitempty"`
CanDelete *bool `json:"can_delete,omitempty"`
}
func (f File) Name() string {
return f.FName
}
func (f File) Size() int64 {
return f.FSize
}
func (f File) Mode() os.FileMode {
return 0
}
func (f File) ModTime() time.Time {
return time.Now()
}
func (f File) IsDir() bool {
if f.FType != "directory" {
return false
}
return true
}
func (f File) Sys() interface{} {
return nil
}
type Metadata struct {
CanSee *bool `json:"can_read,omitempty"`
CanCreateFile *bool `json:"can_create_file,omitempty"`
CanCreateDirectory *bool `json:"can_create_directory,omitempty"`
CanRename *bool `json:"can_rename,omitempty"`
CanMove *bool `json:"can_move,omitempty"`
CanUpload *bool `json:"can_upload,omitempty"`
Expire *time.Time `json:"-"`
}
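
Every storage backend implements IBackend, and listings are returned as os.FileInfo values via File. The following compile-check sketch (nullBackend and everything else in it is illustrative, not part of the commit) shows the full surface a new backend has to provide:

package main

import (
    "errors"
    "fmt"
    "io"
    "os"
    "strings"

    . "github.com/mickael-kerjean/nuage/server/common"
)

// File is designed to satisfy os.FileInfo, so listings can be returned as-is
var _ os.FileInfo = File{}

// nullBackend is a do-nothing stub, only here to show the IBackend surface
type nullBackend struct{}

func (n nullBackend) Ls(path string) ([]os.FileInfo, error) {
    return []os.FileInfo{File{FName: "readme.txt", FType: "file", FSize: 12}}, nil
}
func (n nullBackend) Cat(path string) (io.Reader, error)     { return strings.NewReader("hello world!"), nil }
func (n nullBackend) Mkdir(path string) error                 { return errors.New("read only") }
func (n nullBackend) Rm(path string) error                    { return errors.New("read only") }
func (n nullBackend) Mv(from string, to string) error         { return errors.New("read only") }
func (n nullBackend) Save(path string, file io.Reader) error  { return errors.New("read only") }
func (n nullBackend) Touch(path string) error                 { return errors.New("read only") }
func (n nullBackend) Info() string                            { return "null" }

// compile-time check that the stub really implements the interface
var _ IBackend = nullBackend{}

func main() {
    var b IBackend = nullBackend{}
    files, _ := b.Ls("/")
    for _, f := range files {
        fmt.Println(f.Name(), f.Size(), f.IsDir())
    }
}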

19
server/common/utils.go Normal file

@ -0,0 +1,19 @@
package common
import (
"math/rand"
"time"
)
func init() {
// math/rand is deterministic unless seeded: without this, the secret_key
// auto-generated in populateDefault would be identical on every install
rand.Seed(time.Now().UnixNano())
}
var Letters = []rune("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
func RandomString(n int) string {
b := make([]rune, n)
for i := range b {
b[i] = Letters[rand.Intn(len(Letters))]
}
return string(b)
}
func NewBool(t bool) *bool {
return &t
}


@ -1,203 +0,0 @@
var express = require('express'),
app = express.Router(),
path = require('path'),
crypto = require('../utils/crypto'),
Files = require('../model/files'),
config = require('../../config_server'),
multiparty = require('multiparty'),
request = require('request'),
FormData = require('form-data'),
mime = require('../utils/mimetype.js');
app.use(function(req, res, next){
req.cookies.auth = crypto.decrypt(req.cookies.auth);
if(req.cookies.auth !== null){
return next();
}else{
return res.status(401).send({status: "error", message: "You need to authenticate first"});
}
});
// list files
app.get('/ls', function(req, res){
const path = pathBuilder(req);
if(path){
Files
.ls(path, req.cookies.auth)
.then(function(results){ res.send({status: 'ok', results: results}); })
.catch(function(err){ errorHandler(res, err, 'cannot fetch files'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
// get a file content
app.get('/cat', function(req, res){
const path = pathBuilder(req);
res.clearCookie("download");
if(path){
Files.cat(path, req.cookies.auth, res)
.then(function(_stream){
_stream = _stream.on('error', function (error) {
let status = 404;
if(error && typeof error.status === "number"){
status = error.status;
}
res.status(status).send({status: status, message: "There's nothing here"});
if(typeof this.end === "function") this.end();
});
const mType = mime.getMimeType(path);
res.set('Content-Type', mType);
if(!config.transcoder.url){ return _stream.pipe(res); }
if(/^image\//.test(mType) && ["image/gif", "image/svg", "image/x-icon"].indexOf(mType) === -1){
const form = new FormData();
form.append('image', _stream, {
filename: 'tmp',
contentType: mType,
});
let endpoint = config.transcoder.url;
if(req.query.size){
endpoint += "?size="+req.query.size+"&meta="+(req.query.meta === "true" ? "true": "false");
}
const post_request = request({
method: "POST",
url: endpoint,
headers: form.getHeaders()
});
return form.pipe(post_request)
.on('error', (err) => {
res.status(500).end();
})
.pipe(res);
}
return _stream.pipe(res);
})
.catch(function(err){ errorHandler(res, err, 'couldn\'t read the file'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
// create/update a file
// https://github.com/pillarjs/multiparty
app.post('/cat', function(req, res){
const form = new multiparty.Form(),
path = pathBuilder(req);
if(path){
form.on('part', function(part) {
part.on('error', function(err){
errorHandler(res, {code: "INTERNAL_ERROR", message: "internal error"}, 'internal error');
});
Files.write(path, part, req.cookies.auth)
.then(function(result){
res.send({status: 'ok'});
})
.catch(function(err){ errorHandler(res, err, 'couldn\'t write the file'); });
});
form.parse(req);
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
// rename a file/directory
app.get('/mv', function(req, res){
req.query.path = req.query.from;
const from = pathBuilder(req);
req.query.path = req.query.to;
const to = pathBuilder(req)
if(from === to){
res.send({status: 'ok'});
}else if(from && to){
Files.mv(from, to, req.cookies.auth)
.then(function(message){ res.send({status: 'ok'}); })
.catch(function(err){ errorHandler(res, err, 'couldn\'t rename your file'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
// delete a file/directory
app.get('/rm', function(req, res){
const path = pathBuilder(req);
if(path){
Files.rm(path, req.cookies.auth)
.then(function(message){ res.send({status: 'ok'}); })
.catch(function(err){ errorHandler(res, err, 'couldn\'t delete your file'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
// create a directory
app.get('/mkdir', function(req, res){
const path = pathBuilder(req);
if(path){
Files.mkdir(path, req.cookies.auth)
.then(function(message){ res.send({status: 'ok'}); })
.catch(function(err){ errorHandler(res, err, 'couldn\'t create the directory'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
app.get('/touch', function(req, res){
const path = pathBuilder(req);
if(path){
Files.touch(path, req.cookies.auth)
.then(function(message){ res.send({status: 'ok'}); })
.catch(function(err){ errorHandler(res, err, 'couldn\'t create the file'); });
}else{
res.send({status: 'error', message: 'unknown path'});
}
});
module.exports = app;
function pathBuilder(req){
return path.posix.join(req.cookies.auth.payload.path || '', decodeURIComponent(req.query.path) || '');
}
function errorHandler(res, err, defaultMessage){
const code = {
"INTERNAL_ERROR": {message: "Oops, it seems we had a problem", status: 500},
"ECONNREFUSED": {message: "Oops, the service you are connected on is not available", status: 502}
};
const status = function(_code, _status){
if(code[_code]){
return code[_code]['status'];
}
_status = parseInt(_status);
if(_status >= 400 && _status < 600){
return _status;
}
return 404;
}(err.code || err.errno, err.status);
if(code[err.code || err.errno]){
res.status(status).send({
status: 'error',
message: code[err.code]['message']
});
}else if(err.message){
res.status(status).send({
status: 'error',
message: err.message || 'cannot fetch files',
trace: err
});
}else{
res.status(status).send({
status: 'error',
message: defaultMessage,
trace: err
});
}
}


@ -1,64 +0,0 @@
var express = require('express'),
app = express.Router(),
crypto = require('../utils/crypto'),
Session = require('../model/session'),
http = require('request-promise');
app.get('/', function(req, res){
let data = crypto.decrypt(req.cookies.auth);
if(data && data.type){
res.send({status: 'ok', result: true})
}else{
res.send({status: 'ok', result: false})
}
});
app.post('/', function(req, res){
Session.test(req.body)
.then((state) => {
if(!state.path) state.path = "";
else{ state.path = state.path.replace(/\/$/, ''); }
let persist = {
type: req.body.type,
payload: state
};
const cookie = crypto.encrypt(persist);
if(Buffer.byteLength(cookie, 'utf-8') > 4096){
res.status(413).send({status: 'error', message: 'we can\'t authenticate you'})
}else{
res.cookie('auth', crypto.encrypt(persist), { maxAge: 365*24*60*60*1000, httpOnly: true, path: "/api/" });
res.send({status: 'ok'});
}
})
.catch((err) => {
let message = function(err){
let t = err && err.message || 'could not establish a connection';
if(err.code){
t += ' ('+err.code+')';
}
return t;
}
res.status(401).send({status: 'error', message: message(err), code: err.code});
});
});
app.delete('/', function(req, res){
res.clearCookie("auth", {path: "/api/"});
// TODO in May 2019: remove the line below, which was inserted to mitigate a cookie migration issue.
res.clearCookie("auth"); // the issue was a change in the cookie path which would have made it
// impossible for an existing user to log out
res.send({status: 'ok'});
});
app.get('/auth/:id', function(req, res){
Session.auth({type: req.params.id})
.then((url) => {
res.send({status: 'ok', result: url});
})
.catch((err) => {
res.status(404).send({status: 'error', message: 'can\'t get authorization url', trace: err});
});
});
module.exports = app;


@ -1,19 +0,0 @@
var app = require('./bootstrap'),
express = require('express'),
filesRouter = require('./ctrl/files'),
sessionRouter = require('./ctrl/session'),
fs = require('fs');
app.get('/api/ping', function(req, res){ res.send('pong')})
app.use('/api/files', filesRouter)
app.use('/api/session', sessionRouter);
app.use('/', express.static(__dirname + '/public/'))
app.use('/*', function (req, res){
fs.createReadStream(__dirname + '/public/index.html').pipe(res);
});
app.listen(8334, function(err){
if(err){ console.log(err); }
else{ console.log("Running: http://127.0.0.1:8334"); }
});

50
server/main.go Normal file

@ -0,0 +1,50 @@
package main
import (
//"context"
//"github.com/getlantern/systray"
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/router"
//"github.com/pkg/browser"
//"io/ioutil"
"strconv"
)
var APP_URL string
func main() {
app := App{}
app.Config = NewConfig()
app.Helpers = NewHelpers(app.Config)
router.Init(&app)
APP_URL = "http://" + app.Config.General.Host + ":" + strconv.Itoa(app.Config.General.Port)
// systray.Run(setupSysTray(&app), func() {
// srv.Shutdown(context.TODO())
// })
select {}
}
// func setupSysTray(a *App) func() {
// return func() {
// b, err := ioutil.ReadFile(a.Config.Runtime.AbsolutePath("data/public/assets/logo/favicon.ico"))
// if err != nil {
// return
// }
// systray.SetIcon(b)
// mOpen := systray.AddMenuItem("Open", "Open in a browser")
// mQuit := systray.AddMenuItem("Quit", "Quit the whole app")
// go func() {
// for {
// select {
// case <-mOpen.ClickedCh:
// browser.OpenURL(APP_URL)
// case <-mQuit.ClickedCh:
// systray.Quit()
// return
// }
// }
// }()
// }
// }


@ -0,0 +1,201 @@
package backend
import (
"encoding/json"
. "github.com/mickael-kerjean/nuage/server/common"
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"regexp"
"strings"
"time"
)
type Dropbox struct {
ClientId string
Hostname string
Bearer string
}
func NewDropbox(params map[string]string, app *App) (IBackend, error) {
backend := Dropbox{}
backend.ClientId = app.Config.OAuthProvider.Dropbox.ClientID
backend.Hostname = app.Config.General.Host
backend.Bearer = params["bearer"]
if backend.ClientId == "" {
return backend, NewError("Missing ClientID: Contact your admin", 502)
} else if backend.Hostname == "" {
return backend, NewError("Missing Hostname: Contact your admin", 502)
}
return backend, nil
}
func (d Dropbox) Info() string {
return "dropbox"
}
func (d Dropbox) OAuthURL() string {
url := "https://www.dropbox.com/oauth2/authorize?"
url += "client_id=" + d.ClientId
url += "&redirect_uri=" + d.Hostname + "/login"
url += "&response_type=token"
url += "&state=dropbox"
return url
}
func (d Dropbox) Ls(path string) ([]os.FileInfo, error) {
files := make([]os.FileInfo, 0)
args := struct {
Path string `json:"path"`
Recursive bool `json:"recursive"`
IncludeDeleted bool `json:"include_deleted"`
IncludeMediaInfo bool `json:"include_media_info"`
}{d.path(path), false, false, true}
res, err := d.request("POST", "https://api.dropboxapi.com/2/files/list_folder", d.toReader(args), nil)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode >= 400 {
return nil, NewError(HTTPFriendlyStatus(res.StatusCode)+": can't get things in "+filepath.Base(path), res.StatusCode)
}
var r struct {
Files []struct {
Type string `json:".tag"`
Name string `json:"name"`
Time time.Time `json:"client_modified"`
Size uint `json:"size"`
} `json:"entries"`
}
decoder := json.NewDecoder(res.Body)
decoder.Decode(&r)
for _, obj := range r.Files {
files = append(files, File{
FName: obj.Name,
FType: func(p string) string {
if p == "folder" {
return "directory"
}
return "file"
}(obj.Type),
FTime: obj.Time.UnixNano() / 1000,
FSize: int64(obj.Size),
})
}
return files, nil
}
func (d Dropbox) Cat(path string) (io.Reader, error) {
res, err := d.request("POST", "https://content.dropboxapi.com/2/files/download", nil, func(req *http.Request) {
arg := struct {
Path string `json:"path"`
}{d.path(path)}
json, _ := ioutil.ReadAll(d.toReader(arg))
req.Header.Set("Dropbox-API-Arg", string(json))
})
if err != nil {
return nil, err
}
return res.Body, nil
}
func (d Dropbox) Mkdir(path string) error {
args := struct {
Path string `json:"path"`
Autorename bool `json:"autorename"`
}{d.path(path), false}
res, err := d.request("POST", "https://api.dropboxapi.com/2/files/create_folder_v2", d.toReader(args), nil)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't create "+filepath.Base(path), res.StatusCode)
}
return nil
}
func (d Dropbox) Rm(path string) error {
args := struct {
Path string `json:"path"`
}{d.path(path)}
res, err := d.request("POST", "https://api.dropboxapi.com/2/files/delete_v2", d.toReader(args), nil)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't remove "+filepath.Base(path), res.StatusCode)
}
return nil
}
func (d Dropbox) Mv(from string, to string) error {
args := struct {
FromPath string `json:"from_path"`
ToPath string `json:"to_path"`
}{d.path(from), d.path(to)}
res, err := d.request("POST", "https://api.dropboxapi.com/2/files/move_v2", d.toReader(args), nil)
if err != nil {
return err
}
defer res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't do that", res.StatusCode)
}
return nil
}
func (d Dropbox) Touch(path string) error {
return d.Save(path, strings.NewReader(""))
}
func (d Dropbox) Save(path string, file io.Reader) error {
res, err := d.request("POST", "https://content.dropboxapi.com/2/files/upload", file, func(req *http.Request) {
arg := struct {
Path string `json:"path"`
AutoRename bool `json:"autorename"`
Mode string `json:"mode"`
}{d.path(path), false, "overwrite"}
json, _ := ioutil.ReadAll(d.toReader(arg))
req.Header.Set("Dropbox-API-Arg", string(json))
req.Header.Set("Content-Type", "application/octet-stream")
})
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't do that", res.StatusCode)
}
return err
}
func (d Dropbox) request(method string, url string, body io.Reader, fn func(*http.Request)) (*http.Response, error) {
req, err := http.NewRequest(method, url, body)
if err != nil {
return nil, err
}
req.Header.Set("Authorization", "Bearer "+d.Bearer)
if fn == nil {
req.Header.Set("Content-Type", "application/json")
} else {
fn(req)
}
if req.Body != nil {
defer req.Body.Close()
}
return HTTPClient.Do(req)
}
func (d Dropbox) toReader(a interface{}) io.Reader {
j, err := json.Marshal(a)
if err != nil {
return nil
}
return strings.NewReader(string(j))
}
func (d Dropbox) path(path string) string {
return regexp.MustCompile(`\/$`).ReplaceAllString(path, "")
}
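
A hypothetical end-to-end use of this backend (the router doing this for real is not included in the diff): the bearer token would normally come from the session cookie after the implicit OAuth flow started by OAuthURL():

package main

import (
    "fmt"

    . "github.com/mickael-kerjean/nuage/server/common"
    "github.com/mickael-kerjean/nuage/server/model/backend"
)

func main() {
    app := App{Config: NewConfig()}

    // the bearer token would normally be extracted from the session
    params := map[string]string{"bearer": "<dropbox access token>"}
    b, err := backend.NewDropbox(params, &app)
    if err != nil {
        panic(err)
    }

    files, err := b.Ls("/")
    if err != nil {
        panic(err)
    }
    for _, f := range files {
        fmt.Println(f.Name(), f.Size(), f.IsDir())
    }
}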


@ -1,161 +0,0 @@
// doc: https://www.dropbox.com/developers/documentation/http/documentation
var http = require('request-promise'),
http_stream = require('request'),
Path = require('path'),
config = require('../../../config_server'),
toString = require('stream-to-string'),
Readable = require('stream').Readable;
function query(params, uri, method = 'GET', data, opts = {}){
if(!opts.headers) opts.headers = {};
opts.headers['Authorization'] = 'Bearer '+params.bearer;
opts.uri = uri;
opts.method = method;
if(data && typeof data === 'object'){
opts.body = JSON.stringify(data);
opts.headers["Content-Type"] = "application/json";
}
return http(opts)
.then((res) => Promise.resolve(JSON.parse(res)))
.catch((res) => {
if(res && res.response && res.response.body){
return Promise.reject(res.response.body);
}else{
return Promise.reject(res);
}
})
}
function query_stream(params, uri, method = 'GET', data, opts = {}){
if(!opts.headers) opts.headers = {};
opts.headers['Authorization'] = 'Bearer '+params.bearer;
opts.uri = uri;
opts.method = method;
opts.body = data;
return Promise.resolve(http_stream(opts));
}
module.exports = {
auth: function(params){
let url = "https://www.dropbox.com/oauth2/authorize?client_id="+config.dropbox.clientID+"&response_type=token&redirect_uri="+config.dropbox.redirectURI+"&state=dropbox"
return Promise.resolve(url)
},
test: function(params){
return query(params, "https://api.dropboxapi.com/2/users/get_current_account", "POST")
.then((opts) => Promise.resolve(params))
.catch((err) => Promise.reject({message: 'Dropbox didn\'t give us access to your account', code: "NOT_AUTHENTICATED"}))
},
cat: function(path, params){
return query_stream(params, "https://content.dropboxapi.com/2/files/download", "POST", null, {
headers: {
"Dropbox-API-Arg": JSON.stringify({path: path})
}
}).then((res) => {
// dropbox sends silly mimetypes like 'application/octet-stream' for pdf files ...
// We can't trust them on this, so we get rid of it. In our case, it will be set by the file controller
const newRes = res.on('response', function(res) {
delete res.headers['content-type'];
});
return Promise.resolve(newRes);
})
},
ls: function(path, params){
if(path === '/') path = '';
return query(params, "https://api.dropboxapi.com/2/files/list_folder", "POST", {path: path, recursive: false, include_deleted: false, include_media_info: true})
.then((res) => {
let files = res.entries.map((file) => {
let tmp = {
size: file.size,
time: new Date(file.client_modified).getTime(),
type: file['.tag'] === 'file' ? 'file' : 'directory',
name: file.name
};
return tmp;
});
return Promise.resolve(files);
});
},
write: function(path, content, params){
return write_file(path, content, params);
},
rm: function(path, params){
return query(params, "https://api.dropboxapi.com/2/files/delete_v2", "POST", {path: path})
.then((res) => Promise.resolve('ok'));
},
mv: function(from, to, params){
return query(params, "https://api.dropboxapi.com/2/files/move_v2", "POST", {from_path: from, to_path: to})
.then((res) => verifyDropbox(res, to, params, 10))
.catch(err => Promise.reject({message: JSON.parse(err).error, code: "DROPBOX_MOVE"}));
},
mkdir: function(path, params){
path = path.replace(/\/$/, '');
return query(params, "https://api.dropboxapi.com/2/files/create_folder_v2", "POST", {path: path, autorename: false})
.then((res) => verifyDropbox(res, path, params, 10))
.then((res) => Promise.resolve('ok'));
},
touch: function(path, params){
var stream = new Readable(); stream.push(''); stream.push(null);
return write_file(path, stream, params);
}
}
function write_file(path, content, params){
return process(path, content, params)
.then((res) => retryOnError(res, path, content, params, 5))
.then((res) => verifyDropbox(res, path, params, 10))
function process(path, content, params){
return query_stream(params, "https://content.dropboxapi.com/2/files/upload", "POST", content, {
headers: {
"Dropbox-API-Arg": JSON.stringify({
path: path,
autorename: false,
mode: "overwrite"
}),
"Content-Type": "application/octet-stream"
}
}).then(toString)
}
function retryOnError(body, path, content, params, n = 5){
body = JSON.parse(body);
if(body && body.error){
return sleep(Math.abs(5 - n) * 1000)
.then(() => process(path, content, params, n -1))
}else{
return Promise.resolve(body);
}
}
}
function verifyDropbox(keep, path, params, n = 10){
let folder_path = Path.posix.dirname(path).replace(/\/$/, '');
if(folder_path === '.'){
folder_path = '';
}
return sleep(Math.abs(10 - n) * 300)
.then(() => query(params, "https://api.dropboxapi.com/2/files/list_folder", "POST", {path: folder_path, recursive: false, include_deleted: false, include_media_info: true}))
.then((res) => {
let found = res.entries.find((function(file){
return file.path_display === path? true : false
}));
if(found){
return Promise.resolve(keep)
}else{
if(n > 0){
return verifyDropbox(keep, path, params, n - 1)
}else{
return Promise.reject({message: 'dropbox didn\'t create the file or was taking too long to do so', code: 'DROPBOX_WRITE_ERROR'})
}
}
})
}
function sleep(t=1000, arg){
return new Promise((done) => {
setTimeout(function(){
done(arg);
}, t)
})
}

158
server/model/backend/ftp.go Normal file

@ -0,0 +1,158 @@
package backend
import (
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/secsy/goftp"
"io"
"os"
"regexp"
"strings"
"time"
)
var FtpCache AppCache
func init() {
FtpCache = NewAppCache(2, 1)
FtpCache.OnEvict(func(key string, value interface{}) {
c := value.(*Ftp)
c.Close()
})
}
type Ftp struct {
client *goftp.Client
}
func NewFtp(params map[string]string, app *App) (IBackend, error) {
c := FtpCache.Get(params)
if c != nil {
d := c.(*Ftp)
return d, nil
}
if params["hostname"] == "" {
params["hostname"] = "localhost"
}
if params["port"] == "" {
params["port"] = "21"
}
if params["username"] == "" {
params["username"] = "anonymous"
}
if params["username"] == "anonymous" && params["password"] == "" {
params["password"] = "anonymous"
}
config := goftp.Config{
User: params["username"],
Password: params["password"],
ConnectionsPerHost: 2,
Timeout: 10 * time.Second,
}
client, err := goftp.DialConfig(config, params["hostname"]+":"+params["port"])
if err != nil {
return nil, err
}
backend := Ftp{client}
FtpCache.Set(params, &backend)
return backend, nil
}
func (f Ftp) Info() string {
return "ftp"
}
func (f Ftp) Home() (string, error) {
return f.client.Getwd()
}
func (f Ftp) Ls(path string) ([]os.FileInfo, error) {
// by default, FTP servers don't seem to mind a readdir on a non-existing
// directory, so we first need to make sure the directory exists
conn, err := f.client.OpenRawConn()
if err != nil {
return nil, err
}
i, s, err := conn.SendCommand("CWD %s", path)
if err != nil {
return nil, NewError(err.Error(), 404)
} else if i >= 300 {
return nil, NewError(s, 404)
}
return f.client.ReadDir(path)
}
func (f Ftp) Cat(path string) (io.Reader, error) {
pr, pw := io.Pipe()
go func() {
if err := f.client.Retrieve(path, pw); err != nil {
pr.CloseWithError(NewError("Problem", 409))
}
pw.Close()
}()
return pr, nil
}
func (f Ftp) Mkdir(path string) error {
_, err := f.client.Mkdir(path)
return err
}
func (f Ftp) Rm(path string) error {
isDirectory := func(p string) bool {
return regexp.MustCompile(`\/$`).MatchString(p)
}
transformError := func(e error) error {
// For some reason bsftp struggles with this library,
// sometimes returning a 200 OK wrapped in an error
if e == nil {
return nil
}
if obj, ok := e.(goftp.Error); ok {
if obj.Code() < 300 && obj.Code() > 0 {
return nil
}
}
return e
}
if isDirectory(path) {
entries, err := f.Ls(path)
if transformError(err) != nil {
return err
}
for _, entry := range entries {
if entry.IsDir() {
err = f.Rm(path + entry.Name() + "/")
if transformError(err) != nil {
return err
}
} else {
err = f.Rm(path + entry.Name())
if transformError(err) != nil {
return err
}
}
}
err = f.client.Rmdir(path)
return transformError(err)
}
err := f.client.Delete(path)
return transformError(err)
}
func (f Ftp) Mv(from string, to string) error {
return f.client.Rename(from, to)
}
func (f Ftp) Touch(path string) error {
return f.client.Store(path, strings.NewReader(""))
}
func (f Ftp) Save(path string, file io.Reader) error {
return f.client.Store(path, file)
}
func (f Ftp) Close() error {
return f.client.Close()
}
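
The Go FTP backend keeps the connection-reuse idea of the removed JavaScript model, but delegates it to AppCache. A sketch of what that buys, with a made-up host (not code from this commit): two NewFtp calls with identical parameters inside the two-minute retention window share a single goftp client, and Close() only runs when the cache evicts the entry:

package main

import (
    "fmt"

    . "github.com/mickael-kerjean/nuage/server/common"
    "github.com/mickael-kerjean/nuage/server/model/backend"
)

func main() {
    app := App{Config: NewConfig()}
    params := map[string]string{
        "hostname": "ftp.example.com",
        "username": "anonymous",
    }

    // first call dials the server and stores the client in FtpCache
    b1, err := backend.NewFtp(params, &app)
    if err != nil {
        panic(err)
    }
    // a second call with the same parameters returns the cached client;
    // Close() only happens when the cache evicts the entry after 2 minutes
    b2, _ := backend.NewFtp(params, &app)

    fmt.Println(b1.Info(), b2.Info()) // ftp ftp
}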


@ -1,159 +0,0 @@
var FtpClient = require("ftp");
// connections are reused to make things faster and avoid too many problems
const connections = {};
setInterval(() => {
for(let key in connections){
if(connections[key].date + (1000*120) < new Date().getTime()){
connections[key].conn.end();
delete connections[key];
}
}
}, 5000);
function connect(params){
if(connections[JSON.stringify(params)]){
connections[JSON.stringify(params)].date = new Date().getTime();
return Promise.resolve(connections[JSON.stringify(params)].conn);
}else{
let c = new FtpClient();
c.connect({
host: params.hostname,
port: params.port || 21,
user: params.username,
password: params.password
});
return new Promise((done, err) => {
c.on('ready', function(){
clearTimeout(timeout);
done(c);
connections[JSON.stringify(params)] = {
date: new Date().getTime(),
conn: c
}
});
c.on('error', function(error){
err(error)
})
// because of: https://github.com/mscdex/node-ftp/issues/187
let timeout = setTimeout(() => {
err('timeout');
}, 5000);
});
}
}
module.exports = {
test: function(params){
return connect(params)
.then(() => Promise.resolve(params))
},
cat: function(path, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.get(path, function(error, stream) {
if (error){ err(error); }
else{ done(stream); }
});
});
});
},
ls: function(path, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.list(path, function(error, list) {
if(error){ err(error) }
else{
list = list
.map(el => {
return {
size: el.size,
time: new Date(el.date).getTime(),
name: el.name,
type: function(t){
if(t === '-'){
return 'file';
}else if(t === 'd'){
return 'directory';
}else if(t === 'l'){
return 'link';
}
}(el.type),
can_read: null,
can_write: null,
can_delete: null,
can_move: null
}
})
.filter(el => {
return el.name === '.' || el.name === '..' ? false : true
});
done(list);
}
})
})
})
},
write: function(path, content, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.put(content, path, function(error){
if (error){ err(error)}
else{ done('ok'); }
});
});
})
},
rm: function(path, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.delete(path, function(error){
if(error){
c.rmdir(path, true, function(error){
if(error) { err(error) }
else{ done('ok dir'); }
});
}
else{ done('ok'); }
});
});
});
},
mv: function(from, to, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.rename(from, to, function(error){
if(error){ err(error) }
else{ done('ok') }
});
});
});
},
mkdir: function(path, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.mkdir(path, function(error){
if(error){ err(error) }
else{ done('ok') }
});
});
});
},
touch: function(path, params){
return connect(params)
.then((c) => {
return new Promise((done, err) => {
c.put(Buffer.from(''), path, function(error){
if (error){ err(error)}
else{ done('ok'); }
});
});
});
}
};


@ -0,0 +1,327 @@
package backend
import (
. "github.com/mickael-kerjean/nuage/server/common"
"golang.org/x/net/context"
"golang.org/x/oauth2"
"golang.org/x/oauth2/google"
"google.golang.org/api/drive/v3"
"io"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
)
const gdriveFolderMarker = "application/vnd.google-apps.folder"
type GDrive struct {
Client *drive.Service
Config *oauth2.Config
}
func NewGDrive(params map[string]string, app *App) (IBackend, error) {
backend := GDrive{}
if app.Config.OAuthProvider.GoogleDrive.ClientID == "" {
return backend, NewError("Missing Client ID: Contact your admin", 502)
} else if app.Config.OAuthProvider.GoogleDrive.ClientSecret == "" {
return backend, NewError("Missing Client Secret: Contact your admin", 502)
} else if app.Config.General.Host == "" {
return backend, NewError("Missing Hostname: Contact your admin", 502)
}
config := &oauth2.Config{
Endpoint: google.Endpoint,
ClientID: app.Config.OAuthProvider.GoogleDrive.ClientID,
ClientSecret: app.Config.OAuthProvider.GoogleDrive.ClientSecret,
RedirectURL: app.Config.General.Host + "/login",
Scopes: []string{"https://www.googleapis.com/auth/drive"},
}
token := &oauth2.Token{
AccessToken: params["token"],
RefreshToken: params["refresh"],
Expiry: func(t string) time.Time {
expiry, err := strconv.ParseInt(t, 10, 64)
if err != nil {
return time.Now()
}
return time.Unix(expiry, 0)
}(params["expiry"]),
TokenType: "bearer",
}
client := config.Client(context.Background(), token)
srv, err := drive.New(client)
if err != nil {
return nil, NewError(err.Error(), 400)
}
backend.Client = srv
backend.Config = config
return backend, nil
}
func (g GDrive) Info() string {
return "googledrive"
}
func (g GDrive) OAuthURL() string {
return g.Config.AuthCodeURL("googledrive", oauth2.AccessTypeOnline)
}
func (g GDrive) OAuthToken(ctx *map[string]string) error {
token, err := g.Config.Exchange(oauth2.NoContext, (*ctx)["code"])
if err != nil {
return err
}
(*ctx)["token"] = token.AccessToken
(*ctx)["refresh"] = token.RefreshToken
(*ctx)["expiry"] = strconv.FormatInt(token.Expiry.UnixNano()/1000, 10)
delete(*ctx, "code")
return nil
}
func (g GDrive) Ls(path string) ([]os.FileInfo, error) {
files := make([]os.FileInfo, 0)
file, err := g.infoPath(path)
if err != nil {
return nil, err
}
res, err := g.Client.Files.List().Q("'" + file.id + "' in parents AND trashed = false").Fields("nextPageToken, files(name, size, modifiedTime, mimeType)").PageSize(500).Do()
if err != nil {
return nil, NewError(err.Error(), 404)
}
for _, obj := range res.Files {
files = append(files, File{
FName: obj.Name,
FType: func(mType string) string {
if mType == gdriveFolderMarker {
return "directory"
}
return "file"
}(obj.MimeType),
FTime: func(t string) int64 {
a, err := time.Parse(time.RFC3339, t)
if err != nil {
return 0
}
return a.UnixNano() / 1000
}(obj.ModifiedTime),
FSize: obj.Size,
})
}
return files, nil
}
func (g GDrive) Cat(path string) (io.Reader, error) {
file, err := g.infoPath(path)
if err != nil {
return nil, err
}
if strings.HasPrefix(file.mType, "application/vnd.google-apps") {
mType := "text/plain"
if file.mType == "application/vnd.google-apps.spreadsheet" {
mType = "text/csv"
}
data, err := g.Client.Files.Export(file.id, mType).Download()
if err != nil {
return nil, err
}
return data.Body, nil
}
data, err := g.Client.Files.Get(file.id).Download()
if err != nil {
return nil, err
}
return data.Body, nil
}
func (g GDrive) Mkdir(path string) error {
parent, err := g.infoPath(getParentPath(path))
if err != nil {
return NewError("Directory already exists", 409)
}
_, err = g.Client.Files.Create(&drive.File{
Name: filepath.Base(path),
Parents: []string{parent.id},
MimeType: gdriveFolderMarker,
}).Do()
return err
}
func (g GDrive) Rm(path string) error {
file, err := g.infoPath(path)
if err != nil {
return err
}
if err = g.Client.Files.Delete(file.id).Do(); err != nil {
return err
}
return nil
}
func (g GDrive) Mv(from string, to string) error {
ffile, err := g.infoPath(from)
if err != nil {
return err
}
tfile, err := g.infoPath(getParentPath(to))
if err != nil {
return err
}
_, err = g.Client.Files.Update(ffile.id, &drive.File{
Name: filepath.Base(to),
}).RemoveParents(ffile.parent).AddParents(tfile.id).Do()
return err
}
func (g GDrive) Touch(path string) error {
file, err := g.infoPath(getParentPath(path))
if err != nil {
return NewError("Base folder not found", 404)
}
_, err = g.Client.Files.Create(&drive.File{
Name: filepath.Base(path),
Parents: []string{file.id},
}).Media(strings.NewReader("")).Do()
return err
}
func (g GDrive) Save(path string, reader io.Reader) error {
if file, err := g.infoPath(path); err == nil {
_, err = g.Client.Files.Update(file.id, &drive.File{}).Media(reader).Do()
return err
}
file, err := g.infoPath(getParentPath(path))
if err != nil {
return err
}
_, err = g.Client.Files.Create(&drive.File{
Name: filepath.Base(path),
Parents: []string{file.id},
}).Media(reader).Do()
return err
}
func (g GDrive) infoPath(p string) (*GDriveMarker, error) {
FindSolutions := func(level int, folder string) ([]GDriveMarker, error) {
res, err := g.Client.Files.List().Q("name = '" + folder + "' AND trashed = false").Fields("files(parents, id, name, mimeType)").PageSize(500).Do()
if err != nil {
return nil, err
}
solutions := make([]GDriveMarker, 0)
for _, file := range res.Files {
if len(file.Parents) == 0 {
continue
}
solutions = append(solutions, GDriveMarker{
file.Id,
file.Parents[0],
file.Name,
level,
file.MimeType,
})
}
return solutions, nil
}
FindRoot := func(level int) ([]GDriveMarker, error) {
root := make([]GDriveMarker, 0)
res, err := g.Client.Files.List().Q("'root' in parents").Fields("files(parents, id, name, mimeType)").PageSize(1).Do()
if err != nil {
return nil, err
}
if len(res.Files) == 0 || len(res.Files[0].Parents) == 0 {
root = append(root, GDriveMarker{
"root",
"root",
"root",
level,
gdriveFolderMarker,
})
return root, nil
}
root = append(root, GDriveMarker{
res.Files[0].Parents[0],
"root",
"root",
level,
gdriveFolderMarker,
})
return root, nil
}
MergeSolutions := func(solutions_bag []GDriveMarker, solutions_new []GDriveMarker) []GDriveMarker {
if len(solutions_bag) == 0 {
return solutions_new
}
solutions := make([]GDriveMarker, 0)
for _, new := range solutions_new {
for _, old := range solutions_bag {
if new.id == old.parent && new.level+1 == old.level {
old.level = new.level
old.parent = new.id
solutions = append(solutions, old)
}
}
}
return solutions
}
var FindId func(folders []string, solutions_bag []GDriveMarker) (*GDriveMarker, error)
FindId = func(folders []string, solutions_bag []GDriveMarker) (*GDriveMarker, error) {
var solutions_new []GDriveMarker
var err error
if len(folders) == 0 {
solutions_new, err = FindRoot(0)
} else {
solutions_new, err = FindSolutions(len(folders), folders[len(folders)-1])
}
if err != nil {
return nil, NewError("Can't get data", 500)
}
solutions_bag = MergeSolutions(solutions_bag, solutions_new)
if len(solutions_bag) == 0 {
return nil, NewError("Doesn't exist", 404)
} else if len(solutions_bag) == 1 {
return &solutions_bag[0], nil
} else {
return FindId(folders[:len(folders)-1], solutions_bag)
}
}
path := make([]string, 0)
for _, chunk := range strings.Split(p, "/") {
if chunk == "" {
continue
}
path = append(path, chunk)
}
if len(path) == 0 {
return &GDriveMarker{
"root",
"",
"root",
0,
gdriveFolderMarker,
}, nil
}
return FindId(path, make([]GDriveMarker, 0))
}
type GDriveMarker struct {
id string
parent string
name string
level int
mType string
}
func getParentPath(path string) string {
re := regexp.MustCompile("/$")
path = re.ReplaceAllString(path, "")
return filepath.Dir(path) + "/"
}


@ -1,500 +0,0 @@
// https://developers.google.com/drive/v3/web/quickstart/nodejs
// https://developers.google.com/apis-explorer/?hl=en_GB#p/drive/v3/
var google = require('googleapis'),
googleAuth = require('google-auth-library'),
config = require('../../../config_server'),
Stream = require('stream');
var client = google.drive('v3');
function findMimeType(filename){
let ext = filename.split('.').slice(-1)[0];
let list = {
xls: 'application/vnd.ms-excel',
xlsx: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
xml: 'text/xml',
ods: 'application/vnd.oasis.opendocument.spreadsheet',
csv: 'text/csv',
tmpl: 'text/plain',
org: 'text/plain',
md: 'text/plain',
pdf: 'application/pdf',
php: 'application/x-httpd-php',
jpg: 'image/jpeg',
png: 'image/png',
gif: 'image/gif',
bmp: 'image/bmp',
txt: 'text/plain',
text: 'text/plain',
conf: 'text/plain',
log: 'text/plain',
doc: 'application/msword',
js: 'text/js',
swf: 'application/x-shockwave-flash',
mp3: 'audio/mpeg',
zip: 'application/zip',
rar: 'application/rar',
tar: 'application/tar',
arj: 'application/arj',
cab: 'application/cab',
html: 'text/html',
htm: 'text/html'
};
return list[ext] || 'application/octet-stream';
}
function decode(path){
let tmp = path.trim().split('/');
let filename = tmp.pop() || null;
tmp.shift();
return {
name: filename,
parents: tmp,
full: filename === null ? tmp : [].concat(tmp, [filename])
};
}
function findId(auth, _folders, ids = []){
const folders = JSON.parse(JSON.stringify(_folders));
const name = folders.pop();
return search(auth, name, folders)
.then((files) => {
let solutions = findSolutions(files, ids);
let aggregatedSolution = [].concat(solutions, ids);
if(solutions.length === 0){
return Promise.reject({message: 'this path doesn\'t exist', code: 'UNKNOWN_PATH'});
}else if(solutions.length === 1){
return Promise.resolve(findFolderId(solutions[0], ids));
}else{
return findId(auth, folders, aggregatedSolution);
}
});
function search(_auth, _name, _folders){
if(_name === undefined){
return findRoot(_auth);
}else{
return findByName(_auth, _name, _folders.length + 1);
}
}
function findRoot(auth){
return new Promise((_done,_err) => {
client.files.list({
auth: auth,
q: "'root' in parents",
pageSize: 1,
fields: "files(parents, id, name)"
}, function(error, response){
if(error){_err(error);}
else{
if(response.files.length > 0){
_done(response.files.map((file) => {
return {
level: 0,
id: file.parents[0],
name: 'root'
};
}));
}else{
_done([{
level: 0,
id: 'root',
name: 'root'
}]);
}
}
});
});
}
function findByName(auth, name, _level){
return new Promise((_done,_err) => {
client.files.list({
auth: auth,
q: "name = '"+name+"' AND trashed = false",
pageSize: 500,
fields: "files(parents, id, name)"
}, function(error, response){
if(error){_err(error);}
else{
_done(response.files.map((file) => {
file.level = _level;
return file;
}));
}
});
});
}
function findFolderId(head, cache, result = 'root'){
for(let i=0, l=cache.length; i<l; i++){
if(head.id === cache[i].parents[0] && head.level + 1 === cache[i].level){
return findFolderId(cache[i], cache);
}
}
return head.id;
}
function findSolutions(newFiles, cache){
return newFiles.filter((newFile) => {
if(cache.length === 0){ return true;}
for(let i=0, j=cache.length; i<j; i++){
if(newFile.id === cache[i].parents[0] && (newFile.level + 1) === cache[i].level){
return true;
}
}
return false;
});
}
}
function authorize(params){
var auth = new googleAuth(),
client_id = config.gdrive.clientID,
client_secret = config.gdrive.clientSecret,
redirect_uri = config.gdrive.redirectURI;
var oauth2Client = new auth.OAuth2(client_id, client_secret, redirect_uri);
return Promise.resolve(oauth2Client);
}
function connect(params){
return authorize(params)
.then((auth) => {
return new Promise((done, err) => {
if(params && params.access_token){
auth.credentials = params;
done(auth);
}else if(params && params.code){
auth.getToken(params.code, function(error, token) {
if(error){ err(error); }
else{
auth.credentials = token;
done(auth);
}
});
}else{
err({message: 'can\'t connect without auth code or token', code: 'INVALID_CONNECTION'});
}
});
return Promise.resolve(auth);
});
}
module.exports = {
auth: function(params){
return authorize()
.then((auth) => {
return Promise.resolve(auth.generateAuthUrl({
access_type: 'online',
scope: [ "https://www.googleapis.com/auth/drive" ]
}));
});
},
test: function(params){
return connect(params)
.then((auth) => {
return new Promise((done, err) => {
client.files.list({
auth: auth,
q: "'root' in parents AND mimeType = 'application/vnd.google-apps.folder'",
pageSize: 5,
fields: "files(parents)"
}, function(error, response) {
if(error){ err(error); }
else{ done(auth.credentials); }
});
});
});
},
cat: function(path, params){
path = decode(path);
return connect(params)
.then((auth) => {
return findId(auth, path.full)
.then((id) => fileInfo(auth, id))
.then((file) => {
if(/application\/vnd.google-apps/.test(file.mimeType)){
let type = 'text/plain';
if(file.mimeType === 'application/vnd.google-apps.spreadsheet'){
type = 'text/csv';
}
return exporter(auth, file.id, type);
}else{
return download(auth, file.id);
}
});
})
.then(function(stream){
stream.on('response', function(response) {
delete response.headers;
});
return Promise.resolve(stream);
});
function fileInfo(auth, id){
return new Promise((done, err) => {
client.files.get({
auth: auth,
fileId: id
},function(error, response){
if(error){ err(error); }
else{ done(response); }
});
});
}
function download(auth, id){
var content = '';
return Promise.resolve(client.files.get({
auth: auth,
fileId: id,
alt: 'media'
}));
}
function exporter(auth, id, type){
var content = '';
return new Promise((done, err) => {
done(client.files.export({
auth: auth,
fileId: id,
mimeType: type
}));
});
}
},
ls: function(_path, params){
path = decode(_path);
return connect(params)
.then((auth) => {
return findId(auth, path.parents)
.then((id) => findDrive(auth, id))
.then(parse);
});
function findDrive(auth, id){
return new Promise((done, err) => {
client.files.list({
spaces: path.space,
auth: auth,
q: "'"+id+"' in parents AND trashed = false",
pageSize: 500,
fields: "files(id,mimeType,modifiedTime,name,size)"
}, function(error, response) {
if(error){ err(error); }
else{ done(response.files); }
});
});
}
function parse(files){
return Promise.resolve(files.map((file) => {
return {
type: file.mimeType === 'application/vnd.google-apps.folder'? 'directory' : 'file',
name: file.name,
size: file.hasOwnProperty('size')? Number(file.size) : 0,
time: new Date(file.modifiedTime).getTime()
};
}));
}
},
write: function(path, content, params){ // TODO
path = decode(path);
return connect(params)
.then((auth) => {
return fileAlreadyExist(auth, path)
.then((obj) => {
if(obj.alreadyExist === true){
return updateFile(auth, content, path.name, obj.id);
}
if(obj.alreadyExist === false){
return createFile(auth, content, path.name, obj.id);
}
});
});
function fileAlreadyExist(auth, path){
return findId(auth, path.full)
.then((id) => Promise.resolve({alreadyExist: true, id: id}))
.catch((err) => {
return findId(auth, path.parents)
.then((id) => Promise.resolve({alreadyExist: false, id: id}))
});
}
function createFile(_auth, _stream, _filename, _folderId){
return new Promise((done, err) => {
client.files.create({
auth: _auth,
fields: 'id',
media: {
mimeType: 'text/plain',
body: _stream
},
resource: {
name: _filename,
parents: [_folderId]
}
}, function(error){
if(error) {err(error); }
else{ done('ok'); }
});
});
}
function updateFile(_auth, _stream, _filename, _folderId){
return new Promise((done, err) => {
client.files.update({
auth: _auth,
fileId: _folderId,
fields: 'id',
media: {
mimeType: findMimeType(_filename),
body: _stream
}
}, function(error){
if(error) {err(error); }
else{ done('ok'); }
})
});
}
},
rm: function(path, params){
path = decode(path);
return connect(params)
.then((auth) => {
return findId(auth, path.full)
.then((id) => {
return new Promise((done, err) => {
client.files.delete({
fileId: id,
auth: auth
}, function(error){
if(error){ err(error); }
else{ done('ok'); }
})
});
});
});
},
mv: function(from, to, params){
from = decode(from);
to = decode(to);
return connect(params)
.then((auth) => {
return Promise.all([findId(auth, from.full), findId(auth, from.parents), findId(auth, to.parents)])
.then((res) => process(auth, res));
});
function wait(res){
return new Promise((done) => {
setTimeout(function(){
done(res);
}, 500);
});
}
function process(auth, res){
let fileId = res[0],
srcId = res[1],
destId = res[2];
let fields = 'id';
let params = {fileId: fileId, auth: auth};
if(destId !== srcId){
fields += ', parents';
params.addParents = destId;
params.removeParents = srcId;
}
if(to.name !== null && from.name !== null && from.name !== to.name ){
fields += ', name';
params.resource = {
name: to.name
};
}
return new Promise((done, err) => {
client.files.update(params, function(error, response){
if(error){ err(error); }
else{ done('ok'); }
});
});
}
},
mkdir: function(path, params){
path = decode(path);
return connect(params)
.then((auth) => {
return findId(auth, path.parents.slice(0, -1))
.then((folder) => {
return new Promise((done, err) => {
client.files.create({
fields: 'id',
auth: auth,
resource: {
name: path.parents.slice(-1)[0],
parents: [folder],
mimeType: 'application/vnd.google-apps.folder'
}
}, function(error){
if(error) {err(error); }
else{ done(auth); }
});
});
});
})
.then((auth) => verifyFolderCreation(auth, path.full));
function verifyFolderCreation(_auth, _path, n = 10){
return sleep(Math.abs(10 - n) * 100)
.then(() => findId(_auth, _path))
.catch((err) => {
if(n > 0 && err && err.code === 'UNKNOWN_PATH'){
return verifyFolderCreation(_auth, _path, n - 1);
}
return Promise.reject(err);
});
function sleep(t=1000, arg){
return new Promise((done) => {
setTimeout(function(){
done(arg);
}, t);
});
}
}
function copy(obj){
return JSON.parse(JSON.stringify(obj));
}
},
touch: function(path, params){
path = decode(path);
var readable = new Stream.Readable();
readable.push('');
readable.push(null);
return connect(params)
.then((auth) => {
return findId(auth, path.parents)
.then((folder) => {
return new Promise((done, err) => {
client.files.create({
auth: auth,
fields: 'id',
media: {
mimeType: 'text/plain',
body: readable
},
resource: {
name: path.name,
parents: [folder]
}
}, function(error){
if(error) {err(error); }
else{ done('ok'); }
});
});
});
});
}
};

347
server/model/backend/git.go Normal file

@ -0,0 +1,347 @@
package backend
import (
"fmt"
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mitchellh/hashstructure"
"golang.org/x/crypto/ssh"
"gopkg.in/src-d/go-git.v4"
"gopkg.in/src-d/go-git.v4/plumbing"
"gopkg.in/src-d/go-git.v4/plumbing/object"
"gopkg.in/src-d/go-git.v4/plumbing/transport"
"gopkg.in/src-d/go-git.v4/plumbing/transport/http"
sshgit "gopkg.in/src-d/go-git.v4/plumbing/transport/ssh"
"io"
"os"
"path/filepath"
"strings"
"time"
)
const GitCachePath = "data/cache/git/"
var GitCache AppCache
type Git struct {
git *GitLib
}
func init() {
GitCache = NewAppCache()
cachePath := filepath.Join(GetCurrentDir(), GitCachePath)
os.RemoveAll(cachePath)
os.MkdirAll(cachePath, os.ModePerm)
GitCache.OnEvict(func(key string, value interface{}) {
g := value.(*Git)
g.Close()
})
}
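// GitParams holds the connection settings of a git backend. The commit field is a
// message template in which {action}, {filename} and {path} are substituted on every
// write (see GitLib.message).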
type GitParams struct {
repo string
username string
password string
passphrase string
commit string
branch string
authorName string
authorEmail string
committerName string
committerEmail string
basePath string
}
func NewGit(params map[string]string, app *App) (*Git, error) {
if obj := GitCache.Get(params); obj != nil {
return obj.(*Git), nil
}
g := &Git{
git: &GitLib{
params: &GitParams{
params["repo"],
params["username"],
params["password"],
params["passphrase"],
params["commit"],
params["branch"],
params["authorName"],
params["authorEmail"],
params["committerName"],
params["committerEmail"],
"",
},
},
}
p := g.git.params
if p.branch == "" {
p.branch = "master"
}
if p.commit == "" {
p.commit = "{action} ({filename}): {path}"
}
if p.authorName == "" {
p.authorName = "Nuage"
}
if p.authorEmail == "" {
p.authorEmail = "https://nuage.kerjean.me"
}
if p.committerName == "" {
p.committerName = "Nuage"
}
if p.committerEmail == "" {
p.committerEmail = "https://nuage.kerjean.me"
}
if len(params["password"]) > 2700 {
return nil, NewError("Your password doesn't fit in a cookie :/", 500)
}
hash, err := hashstructure.Hash(params, nil)
if err != nil {
return nil, NewError("Internal error", 500)
}
p.basePath = app.Helpers.AbsolutePath(GitCachePath + "repo_" + fmt.Sprint(hash) + "/")
repo, err := g.git.open(p, p.basePath)
g.git.repo = repo
if err != nil {
return g, err
}
GitCache.Set(params, g)
return g, nil
}
func (g Git) Info() string {
return "git"
}
func (g Git) Ls(path string) ([]os.FileInfo, error) {
g.git.refresh()
p, err := g.path(path)
if err != nil {
return nil, NewError(err.Error(), 403)
}
file, err := os.Open(p)
if err != nil {
return nil, err
}
return file.Readdir(0)
}
func (g Git) Cat(path string) (io.Reader, error) {
p, err := g.path(path)
if err != nil {
return nil, NewError(err.Error(), 403)
}
return os.Open(p)
}
func (g Git) Mkdir(path string) error {
p, err := g.path(path)
if err != nil {
return NewError(err.Error(), 403)
}
return os.Mkdir(p, os.ModePerm)
}
func (g Git) Rm(path string) error {
p, err := g.path(path)
if err != nil {
return NewError(err.Error(), 403)
}
if err = os.RemoveAll(p); err != nil {
return NewError(err.Error(), 403)
}
message := g.git.message("delete", path)
if err = g.git.save(message); err != nil {
return NewError(err.Error(), 403)
}
return nil
}
func (g Git) Mv(from string, to string) error {
fpath, err := g.path(from)
if err != nil {
return NewError(err.Error(), 403)
}
tpath, err := g.path(to)
if err != nil {
return NewError(err.Error(), 403)
}
if err = os.Rename(fpath, tpath); err != nil {
return NewError(err.Error(), 403)
}
message := g.git.message("move", from)
if err = g.git.save(message); err != nil {
return NewError(err.Error(), 403)
}
return nil
}
func (g Git) Touch(path string) error {
p, err := g.path(path)
if err != nil {
return NewError(err.Error(), 403)
}
file, err := os.Create(p)
if err != nil {
return NewError(err.Error(), 403)
}
file.Close()
message := g.git.message("create", path)
if err = g.git.save(message); err != nil {
return NewError(err.Error(), 403)
}
return nil
}
func (g Git) Save(path string, file io.Reader) error {
p, err := g.path(path)
if err != nil {
return NewError(err.Error(), 403)
}
fo, err := os.Create(p)
if err != nil {
return err
}
io.Copy(fo, file)
fo.Close()
message := g.git.message("save", path)
if err = g.git.save(message); err != nil {
return NewError(err.Error(), 403)
}
return nil
}
func (g Git) Close() error {
return os.RemoveAll(g.git.params.basePath)
}
func (g Git) path(path string) (string, error) {
if path == "" {
return "", NewError("No path available", 400)
}
basePath := filepath.Join(g.git.params.basePath, path)
if string(path[len(path)-1]) == "/" {
basePath += "/"
}
if strings.HasPrefix(basePath, g.git.params.basePath) == false {
return "", NewError("There's nothing here", 403)
}
return basePath, nil
}
type GitLib struct {
repo *git.Repository
params *GitParams
}
func (g *GitLib) open(params *GitParams, path string) (*git.Repository, error) {
g.params = params
if _, err := os.Stat(g.params.basePath); os.IsNotExist(err) {
auth, err := g.auth()
if err != nil {
return nil, err
}
return git.PlainClone(path, false, &git.CloneOptions{
URL: g.params.repo,
Depth: 1,
ReferenceName: plumbing.ReferenceName(fmt.Sprintf("refs/heads/%s", g.params.branch)),
SingleBranch: true,
Auth: auth,
})
}
return git.PlainOpen(g.params.basePath)
}
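// save stages everything in the working tree, commits it with the configured author
// and committer, then pushes the result back to origin.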
func (g *GitLib) save(message string) error {
w, err := g.repo.Worktree()
if err != nil {
return NewError(err.Error(), 500)
}
_, err = w.Add(".")
if err != nil {
return NewError(err.Error(), 500)
}
_, err = w.Commit(message, &git.CommitOptions{
All: true,
Author: &object.Signature{
Name: g.params.authorName,
Email: g.params.authorEmail,
When: time.Now(),
},
Committer: &object.Signature{
Name: g.params.committerName,
Email: g.params.committerEmail,
When: time.Now(),
},
})
if err != nil {
return err
}
auth, err := g.auth()
if err != nil {
return err
}
return g.repo.Push(&git.PushOptions{
Auth: auth,
})
}
func (g *GitLib) refresh() error {
w, err := g.repo.Worktree()
if err != nil {
return err
}
return w.Pull(&git.PullOptions{RemoteName: "origin"})
}
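// auth picks the transport credentials: HTTP basic auth for http(s) remotes, an
// in-memory private key when the password looks like a PEM block, and plain SSH
// password authentication otherwise.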
func (g *GitLib) auth() (transport.AuthMethod, error) {
if strings.HasPrefix(g.params.repo, "http") {
return &http.BasicAuth{
Username: g.params.username,
Password: g.params.password,
}, nil
}
isPrivateKey := func(pass string) bool {
if len(pass) > 1000 && strings.HasPrefix(pass, "-----") {
return true
}
return false
}
if isPrivateKey(g.params.password) {
signer, err := ssh.ParsePrivateKeyWithPassphrase([]byte(g.params.password), []byte(g.params.passphrase))
if err != nil {
return nil, err
}
return &sshgit.PublicKeys{
User: "git",
Signer: signer,
HostKeyCallbackHelper: sshgit.HostKeyCallbackHelper{
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
},
}, nil
}
return &sshgit.Password{
User: g.params.username,
Password: g.params.password,
HostKeyCallbackHelper: sshgit.HostKeyCallbackHelper{
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
},
}, nil
}
func (g *GitLib) message(action string, path string) string {
message := strings.Replace(g.params.commit, "{action}", action, -1)
message = strings.Replace(message, "{filename}", filepath.Base(path), -1)
message = strings.Replace(message, "{path}", strings.Replace(path, g.params.basePath, "", -1), -1)
return message
}


@ -1,338 +0,0 @@
const gitclient = require("nodegit"),
toString = require('stream-to-string'),
fs = require('fs'),
Readable = require('stream').Readable,
Path = require('path'),
crypto = require("crypto"),
BASE_PATH = "/tmp/";
let repos = {};
setInterval(autoVacuum, 1000*60*60); // autovacuum every hour
module.exports = {
test: function(params){
if(!params || !params.repo){ return Promise.reject({message: 'invalid authentication', code: 'INVALID_PARAMS'}) };
if(!params.commit) params.commit = "{action} ({filename}): {path}";
if(!params.branch) params.branch = 'master';
if(!params.author_name) params.author_name = "Nuage";
if(!params.author_email) params.author_email = "https://nuage.kerjean.me";
if(!params.committer_name) params.committer_name = "Nuage";
if(!params.committer_email) params.committer_email = "https://nuage.kerjean.me";
if(params.password && params.password.length > 2700){
return Promise.reject({message: "Your password couldn\'t fit in a cookie :/", code: "COOKIE_ERROR"})
}
return git.open(params)
.then(() => Promise.resolve(params));
},
cat: function(path, params){
return git.open(params)
.then((repo) => git.refresh(repo, params))
.then(() => file.cat(calculate_path(params, path)));
},
ls: function(path, params){
return git.open(params)
.then((repo) => git.refresh(repo, params))
.then(() => file.ls(calculate_path(params, path)))
.then((files) => files.filter((file) => (file.name === '.git' && file.type === 'directory') ? false: true));
},
write: function(path, content, params){
return git.open(params)
.then(() => file.write(calculate_path(params, path), content))
.then(() => git.save(params, path, "write"));
},
rm: function(path, params){
return git.open(params)
.then(() => file.rm(calculate_path(params, path)))
.then(() => git.save(params, path, "delete"));
},
mv: function(from, to, params){
return git.open(params)
.then(() => file.mv(calculate_path(params, from), calculate_path(params, to)))
.then(() => git.save(params, to, 'move'));
},
mkdir: function(path, params){
return git.open(params)
.then(() => file.mkdir(calculate_path(params, path)))
.then(() => git.save(params, path, "create"))
},
touch: function(path, params){
var stream = new Readable(); stream.push(''); stream.push(null);
return git.open(params)
.then(() => file.write(calculate_path(params, path), stream))
.then(() => git.save(params, path, 'create'));
}
};
function autoVacuum(){
file.ls(BASE_PATH).then((files) => {
files.map((_file) => {
const filename = _file.name,
full_path = BASE_PATH + filename;
if(wasCreatedByTheGitBackend(full_path) === false) return;
if(repos[full_path] === undefined){
// remove stuff that was created in a previous session
// => happens on server restart
remove(full_path);
}
// clean up after 5 hours without activity in the repo
const MAXIMUM_DATE_BEFORE_CLEAN = repos[full_path] + 1000*60*60*5;
if(new Date().getTime() > MAXIMUM_DATE_BEFORE_CLEAN){
remove(full_path);
delete repos[full_path];
}
});
});
function remove(path){
return file.rm(path).catch((err) => {
console.warn("WARNING: vacuum", err);
});
}
function wasCreatedByTheGitBackend(name){
return name.indexOf(BASE_PATH+"git_") === 0;
}
}
function calculate_path(params, path){
const repo = path_repo(params);
const full_path = Path.posix.join(repo, path);
if(full_path.indexOf(BASE_PATH) !== 0 || full_path === BASE_PATH){
return BASE_PATH+"error";
}
return full_path;
}
function path_repo(obj){
let hash = crypto.createHash('md5');
for(let key in obj){
if(typeof obj[key] === 'string'){
hash.update(obj[key]);
}
}
const path = BASE_PATH+"git_"+obj.uid+"_"+obj.repo.replace(/[^a-zA-Z]/g, "")+"_"+hash.digest('hex');
repos[path] = new Date().getTime();
return path;
}
const file = {};
file.write = function (path, stream){
return new Promise((done, err) => {
let writer = fs.createWriteStream(path, { flags : 'w' });
stream.pipe(writer);
writer.on('close', function(){
done('ok');
});
writer.on('error', function(error){
err(error);
});
});
};
file.mkdir = function(path){
return new Promise((done, err) => {
fs.mkdir(path, function(error){
if(error){ return err(error); }
return done("ok");
});
});
}
file.mv = function(from, to){
return new Promise((done, err) => {
fs.rename(from, to, function(error){
if(error){ return err(error); }
return done("ok");
});
});
}
file.ls = function(path){
return new Promise((done, err) => {
fs.readdir(path, (error, files) => {
if(error){ return err(error); }
Promise.all(files.map((file) => {
return stats(path+file).then((stat) => {
stat.name = file;
return Promise.resolve(stat);
});
})).then((files) => {
done(files.map((file) => {
return {
size: file.size,
time: new Date(file.mtime).getTime(),
name: file.name,
type: file.isFile()? 'file' : 'directory'
};
}));
}).catch((error) => err(error));
});
});
function stats(path){
return new Promise((done, err) => {
fs.stat(path, function(error, res){
if(error) return err(error);
return done(res);
});
});
}
}
file.rm = function(path){
return rm(path);
function rm(path){
return stat(path).then((_stat) => {
if(_stat.isDirectory()){
return ls(path)
.then((files) => Promise.all(files.map(file => rm(path+"/"+file))))
.then(() => removeEmptyFolder(path));
}else{
return removeFileOrLink(path);
}
});
}
function removeEmptyFolder(path){
return new Promise((done, err) => {
fs.rmdir(path, function(error){
if(error){ return err(error); }
return done("ok");
});
});
}
function removeFileOrLink(path){
return new Promise((done, err) => {
fs.unlink(path, function(error){
if(error){ return err(error); }
return done("ok");
});
});
}
function ls(path){
return new Promise((done, err) => {
fs.readdir(path, function (error, files) {
if(error) return err(error)
return done(files)
});
});
}
function stat(path){
return new Promise((done, err) => {
fs.stat(path, function (error, _stat) {
if(error){ return err(error); }
return done(_stat);
});
});
}
}
file.cat = function(path){
return Promise.resolve(fs.createReadStream(path));
}
const git = {};
git.open = function(params){
count = 0;
return gitclient.Repository.open(path_repo(params))
.catch((err) => {
return gitclient.Clone(params.repo, path_repo(params), {fetchOpts: { callbacks: { credentials: git_creds.bind(null, params) }}})
.then((repo) => {
const branch = params.branch;
return repo.getBranchCommit("origin/"+branch)
.catch(() => repo.getHeadCommit("origin"))
.then((commit) => {
return repo.createBranch(branch, commit)
.then(() => repo.checkoutBranch(branch))
.then(() => Promise.resolve(repo));
})
.catch(() => Promise.resolve(repo));
});
});
};
git.refresh = function(repo, params){
count = 0;
return repo.fetchAll({callbacks: { credentials: git_creds.bind(null, params) }})
.then(() => repo.mergeBranches(params.branch, "origin/"+params.branch, gitclient.Signature.default(repo), 2))
.catch(err => {
if(err.errno === -13){
return git.save(params, '', 'merge')
.then(() => git.refresh(repo, params))
.then(() => Promise.resolve(repo));
}
return Promise.resolve(repo);
});
};
git.save = function(params, path = '', type = ''){
count = 0;
const author = gitclient.Signature.now(params.author_name, params.author_email);
const committer = gitclient.Signature.now(params.committer_name, params.committer_email);
const message = params.commit
.replace("{action}", type)
.replace("{dirname}", Path.posix.dirname(path))
.replace("{filename}", Path.posix.basename(path))
.replace("{path}", path || '');
return git.open(params)
.then((repo) => Promise.all([
Promise.resolve(repo),
getParent(repo, params),
refresh(repo, params)
]))
.then((data) => {
const [repo, commit, oid] = data;
const parents = commit ? [commit] : [];
return repo.createCommit("HEAD", author, committer, message, oid, parents)
.then(() => Promise.resolve(repo));
})
.then((repo) => {
return repo.getRemote("origin")
.then((remote) => {
return remote.push(
["refs/heads/"+params.branch+":refs/heads/"+params.branch],
{ callbacks: { credentials: git_creds.bind(null, params, true) }}
);
})
.catch((err) => Promise.reject({status: 403, message: "Not authorized to push"}));
});
function getParent(repo, params){
return repo.getBranchCommit(params.branch)
.catch(() => {
return repo.getHeadCommit()
.catch(() => Promise.resolve(null));
});
}
function refresh(repo, params){
return repo.refreshIndex()
.then((index) => {
return index.addAll()
.then(() => index.write())
.then(() => index.writeTree());
});
}
};
// the count thingy is used to see whether the request succeeded or not:
// when something fails, nodegit just runs the callback again and again.
// The only way to make it throw an error is to return the defaultNew credential.
let count = 0;
function git_creds(params, fn, _count){
count += 1;
if(count > 1 && _count !== undefined){
return new gitclient.Cred.defaultNew();
}else if(/http[s]?\:\/\//.test(params.repo)){
return new gitclient.Cred.userpassPlaintextNew(params.username, params.password);
}else{
return new gitclient.Cred.sshKeyMemoryNew(params.username, "", params.password, params.passphrase || "")
}
}


@ -0,0 +1,42 @@
package backend
import (
. "github.com/mickael-kerjean/nuage/server/common"
"io"
"os"
"strings"
)
type Nothing struct {
}
func NewNothing(params map[string]string, app *App) (*Nothing, error) {
return &Nothing{}, nil
}
func (b Nothing) Info() string {
return "N/A"
}
func (b Nothing) Ls(path string) ([]os.FileInfo, error) {
return nil, NewError("", 401)
}
func (b Nothing) Cat(path string) (io.Reader, error) {
return strings.NewReader(""), NewError("", 401)
}
func (b Nothing) Mkdir(path string) error {
return NewError("", 401)
}
func (b Nothing) Rm(path string) error {
return NewError("", 401)
}
func (b Nothing) Mv(from string, to string) error {
return NewError("", 401)
}
func (b Nothing) Touch(path string) error {
return NewError("", 401)
}
func (b Nothing) Save(path string, file io.Reader) error {
return NewError("", 401)
}

291
server/model/backend/s3.go Normal file

@ -0,0 +1,291 @@
package backend
import (
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/aws/aws-sdk-go/aws/session"
"github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager"
. "github.com/mickael-kerjean/nuage/server/common"
"io"
"os"
"path/filepath"
"strings"
)
var S3Cache AppCache
type S3Backend struct {
client *s3.S3
config *aws.Config
params map[string]string
}
func init() {
S3Cache = NewAppCache(2, 1)
}
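// NewS3 builds an S3 backend. Paths are interpreted as /<bucket>/<key>; an empty
// bucket means we are browsing the list of buckets themselves (see S3Backend.path).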
func NewS3(params map[string]string, app *App) (IBackend, error) {
if params["region"] == "" {
params["region"] = "us-east-2"
}
config := &aws.Config{
Credentials: credentials.NewStaticCredentials(params["access_key_id"], params["secret_access_key"], ""),
S3ForcePathStyle: aws.Bool(true),
Region: aws.String(params["region"]),
}
if params["endpoint"] != "" {
config.Endpoint = aws.String(params["endpoint"])
}
backend := &S3Backend{
config: config,
params: params,
client: s3.New(session.New(config)),
}
return backend, nil
}
func (s S3Backend) Info() string {
return "s3"
}
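// Meta disables file creation, rename, move and upload while browsing the list of
// buckets, where those operations make no sense.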
func (s S3Backend) Meta(path string) *Metadata {
if path == "/" {
return &Metadata{
CanCreateFile: NewBool(false),
CanRename: NewBool(false),
CanMove: NewBool(false),
CanUpload: NewBool(false),
}
}
return nil
}
func (s S3Backend) Ls(path string) ([]os.FileInfo, error) {
p := s.path(path)
files := make([]os.FileInfo, 0)
if p.bucket == "" {
b, err := s.client.ListBuckets(&s3.ListBucketsInput{})
if err != nil {
return nil, err
}
for _, bucket := range b.Buckets {
files = append(files, &File{
FName: *bucket.Name,
FType: "directory",
FTime: bucket.CreationDate.UnixNano() / 1000,
CanMove: NewBool(false),
})
}
return files, nil
}
client := s3.New(s.createSession(p.bucket))
objs, err := client.ListObjects(&s3.ListObjectsInput{
Bucket: aws.String(p.bucket),
Prefix: aws.String(p.path),
Delimiter: aws.String("/"),
})
if err != nil {
return nil, err
}
for _, object := range objs.Contents {
files = append(files, &File{
FName: filepath.Base(*object.Key),
FType: "file",
FTime: object.LastModified.UnixNano() / 1000,
FSize: *object.Size,
})
}
for _, object := range objs.CommonPrefixes {
files = append(files, &File{
FName: filepath.Base(*object.Prefix),
FType: "directory",
})
}
return files, nil
}
func (s S3Backend) Cat(path string) (io.Reader, error) {
p := s.path(path)
client := s3.New(s.createSession(p.bucket))
obj, err := client.GetObject(&s3.GetObjectInput{
Bucket: aws.String(p.bucket),
Key: aws.String(p.path),
})
if err != nil {
return nil, err
}
return obj.Body, nil
}
func (s S3Backend) Mkdir(path string) error {
p := s.path(path)
client := s3.New(s.createSession(p.bucket))
if p.path == "" {
_, err := client.CreateBucket(&s3.CreateBucketInput{
Bucket: aws.String(p.bucket),
})
return err
}
_, err := client.PutObject(&s3.PutObjectInput{
Bucket: aws.String(p.bucket),
Key: aws.String(p.path),
})
return err
}
func (s S3Backend) Rm(path string) error {
p := s.path(path)
client := s3.New(s.createSession(p.bucket))
if p.bucket == "" {
return NewError("Doesn't exist", 404)
}
objs, err := client.ListObjects(&s3.ListObjectsInput{
Bucket: aws.String(p.bucket),
Prefix: aws.String(p.path),
Delimiter: aws.String("/"),
})
if err != nil {
return err
}
for _, obj := range objs.Contents {
_, err := client.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(p.bucket),
Key: obj.Key,
})
if err != nil {
return err
}
}
for _, pref := range objs.CommonPrefixes {
s.Rm("/" + p.bucket + "/" + *pref.Prefix)
_, err := client.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(p.bucket),
Key: pref.Prefix,
})
if err != nil {
return err
}
}
if err != nil {
return err
}
if p.path == "" {
_, err := client.DeleteBucket(&s3.DeleteBucketInput{
Bucket: aws.String(p.bucket),
})
return err
}
_, err = client.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(p.bucket),
Key: aws.String(p.path),
})
return err
}
func (s S3Backend) Mv(from string, to string) error {
f := s.path(from)
t := s.path(to)
client := s3.New(s.createSession(f.bucket))
if f.path == "" {
return NewError("Can't move this", 403)
}
_, err := client.CopyObject(&s3.CopyObjectInput{
Bucket: aws.String(t.bucket),
CopySource: aws.String(f.bucket + "/" + f.path),
Key: aws.String(t.path),
})
if err != nil {
return err
}
return s.Rm(from)
}
func (s S3Backend) Touch(path string) error {
p := s.path(path)
client := s3.New(s.createSession(p.bucket))
if p.bucket == "" {
return NewError("Can't do that on S3", 403)
}
_, err := client.PutObject(&s3.PutObjectInput{
Body: strings.NewReader(""),
ContentLength: aws.Int64(0),
Bucket: aws.String(p.bucket),
Key: aws.String(p.path),
})
return err
}
func (s S3Backend) Save(path string, file io.Reader) error {
p := s.path(path)
if p.bucket == "" {
return NewError("Can't do that on S3", 403)
}
uploader := s3manager.NewUploader(s.createSession(p.bucket))
_, err := uploader.Upload(&s3manager.UploadInput{
Body: file,
Bucket: aws.String(p.bucket),
Key: aws.String(p.path),
})
return err
}
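// createSession returns an AWS session pinned to the bucket's region. The region is
// resolved once through GetBucketLocation and cached, falling back to us-east-1 when
// the lookup fails or returns no location constraint.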
func (s S3Backend) createSession(bucket string) *session.Session {
params := s.params
params["bucket"] = bucket
c := S3Cache.Get(params)
if c == nil {
res, err := s.client.GetBucketLocation(&s3.GetBucketLocationInput{
Bucket: aws.String(bucket),
})
if err != nil {
s.config.Region = aws.String("us-east-1")
} else {
if res.LocationConstraint == nil {
s.config.Region = aws.String("us-east-1")
} else {
s.config.Region = res.LocationConstraint
}
}
S3Cache.Set(params, s.config.Region)
} else {
s.config.Region = c.(*string)
}
sess := session.New(s.config)
return sess
}
type S3Path struct {
bucket string
path string
}
func (s S3Backend) path(p string) S3Path {
sp := strings.Split(p, "/")
bucket := ""
if len(sp) > 1 {
bucket = sp[1]
}
path := ""
if len(sp) > 2 {
path = strings.Join(sp[2:], "/")
}
return S3Path{
bucket,
path,
}
}


@ -1,246 +0,0 @@
// https://www.npmjs.com/package/aws-sdk
var AWS = require('aws-sdk');
function decode(path){
let tmp = path.split('/');
return {
bucket: tmp.splice(0, 2)[1] || null,
path: tmp.join('/')
}
}
function connect(params){
let config = {
apiVersion: '2006-03-01',
accessKeyId: params.access_key_id,
secretAccessKey: params.secret_access_key,
signatureVersion: 'v4',
s3ForcePathStyle: true,
//sslEnabled: true
};
if(params.endpoint){
config.endpoint = new AWS.Endpoint(params.endpoint);
}
var s3 = new AWS.S3(config);
return Promise.resolve(s3);
}
module.exports = {
test: function(params){
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
s3.listBuckets(function(error, data) {
if(error){ err(error) }
else{ done(params) }
});
});
});
},
cat: function(path, params, res){
path = decode(path);
return connect(params)
.then((s3) => {
return Promise.resolve(s3.getObject({
Bucket: path.bucket,
Key: path.path
}).on('httpHeaders', function (statusCode, headers) {
res.set('content-type', headers['content-type']);
res.set('content-length', headers['content-length']);
res.set('last-modified', headers['last-modified']);
}).createReadStream())
});
},
ls: function(path, params){
if(/\/$/.test(path) === false) path += '/';
path = decode(path);
return connect(params)
.then((s3) => {
if(path.bucket === null){
return new Promise((done, err) => {
s3.listBuckets(function(error, data) {
if(error){ err(error) }
else{
let buckets = data.Buckets.map((bucket) => {
return {
name: bucket.Name,
type: 'bucket',
time: new Date(bucket.CreationDate).getTime(),
can_read: true,
can_delete: true,
can_move: false
}
});
buckets.push({type: 'metadata', name: './', can_create_file: false, can_create_directory: true});
done(buckets)
}
});
});
}else{
return new Promise((done, err) => {
s3.listObjects({
Bucket: path.bucket,
Prefix: path.path,
Delimiter: '/'
}, function(error, data) {
if(error){ err(error) }
else{
let content = data.Contents
.filter((file) => {
return file.Key === path.path? false : true;
})
.map((file) => {
return {
type: 'file',
size: file.Size,
time: new Date(file.LastModified).getTime(),
name: file.Key.split('/').pop()
}
});
let folders = data.CommonPrefixes.map((prefix) => {
return {
type: 'directory',
size: 0,
time: null,
name: prefix.Prefix.split('/').slice(-2)[0]
}
});
return done([].concat(folders, content));
}
});
});
}
});
},
write: function(path, stream, params){
path = decode(path);
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
s3.upload({
Bucket: path.bucket,
Key: path.path,
Body: stream,
ContentLength: stream.byteCount
}, function(error, data) {
if(error){ err(error) }
else{
done('ok');
}
});
});
});
},
rm: function(path, params){
path = decode(path);
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
s3.listObjects({
Bucket: path.bucket,
Prefix: path.path
}, function(error, obj){
if(error){ err(error); }
else{
Promise.all(obj.Contents.map((file) => {
return deleteObject(s3, path.bucket, file.Key)
})).then(function(){
if(path.path === ''){
s3.deleteBucket({
Bucket: path.bucket
}, function(error){
if(error){ err(error)}
else{ done('ok'); }
});
}else{
done('ok');
}
})
}
})
});
});
function deleteObject(s3, bucket, key){
return new Promise((done, err) => {
s3.deleteObject({
Bucket: bucket,
Key: key
}, function(error, data) {
if(error){ err(error) }
else{ done('ok') }
});
})
}
},
mv: function(from, to, params){
from = decode(from);
to = decode(to);
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
s3.copyObject({
Bucket: to.bucket,
CopySource: from.bucket+'/'+from.path,
Key: to.path
}, function(error, data) {
if(error){ err(error) }
else{
s3.deleteObject({
Bucket: from.bucket,
Key: from.path
}, function(error){
if(error){ err(error) }
else{
done('ok');
}
})
}
});
});
});
},
mkdir: function(path, params){
if(/\/$/.test(path) === false) path += '/';
path = decode(path);
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
if(path.path === ''){
s3.createBucket({
Bucket: path.bucket
}, function(error, data){
if(error){ err(error) }
else{ done('ok') }
});
}else{
s3.putObject({
Bucket: path.bucket,
Key: path.path
}, function(error, data) {
if(error){ err(error) }
else{ done('ok') }
});
}
});
})
},
touch: function(path, params){
path = decode(path);
return connect(params)
.then((s3) => {
return new Promise((done, err) => {
s3.putObject({
Bucket: path.bucket,
Key: path.path,
Body: ''
}, function(error, data) {
if(error){ err(error) }
else{ done('ok') }
});
});
})
}
}


@ -0,0 +1,266 @@
package backend
import (
"fmt"
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/pkg/sftp"
"golang.org/x/crypto/ssh"
"io"
"os"
"strings"
)
var SftpCache AppCache
type Sftp struct {
SSHClient *ssh.Client
SFTPClient *sftp.Client
}
func init() {
SftpCache = NewAppCache()
SftpCache.OnEvict(func(key string, value interface{}) {
c := value.(*Sftp)
c.Close()
})
}
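// NewSftp opens (or reuses from the cache) an SSH connection plus an SFTP session on
// top of it. The password field doubles as a PEM encoded private key when it looks
// like one, in which case the passphrase is used to decrypt it.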
func NewSftp(params map[string]string, app *App) (*Sftp, error) {
var s Sftp = Sftp{}
p := struct {
hostname string
port string
username string
password string
passphrase string
}{
params["hostname"],
params["port"],
params["username"],
params["password"],
params["passphrase"],
}
if p.port == "" {
p.port = "22"
}
c := SftpCache.Get(params)
if c != nil {
d := c.(*Sftp)
return d, nil
}
addr := p.hostname + ":" + p.port
var auth []ssh.AuthMethod
isPrivateKey := func(pass string) bool {
if len(pass) > 1000 && strings.HasPrefix(pass, "-----") {
return true
}
return false
}
if isPrivateKey(p.password) {
signer, err := ssh.ParsePrivateKeyWithPassphrase([]byte(p.password), []byte(p.passphrase))
if err == nil {
auth = []ssh.AuthMethod{ssh.PublicKeys(signer)}
}
} else {
auth = []ssh.AuthMethod{ssh.Password(p.password)}
}
config := &ssh.ClientConfig{
User: p.username,
Auth: auth,
HostKeyCallback: ssh.InsecureIgnoreHostKey(),
}
client, err := ssh.Dial("tcp", addr, config)
if err != nil {
fmt.Println(err.Error())
return &s, NewError("Connection denied", 502)
}
s.SSHClient = client
session, err := sftp.NewClient(s.SSHClient)
if err != nil {
return &s, NewError("Can't establish connection", 502)
}
s.SFTPClient = session
SftpCache.Set(params, &s)
return &s, nil
}
func (b Sftp) Info() string {
return "sftp"
}
func (b Sftp) Home() (string, error) {
cwd, err := b.SFTPClient.Getwd()
if err != nil {
return "", b.err(err)
}
length := len(cwd)
if length > 0 && cwd[length-1:] != "/" {
return cwd + "/", nil
}
return cwd, nil
}
func (b Sftp) Ls(path string) ([]os.FileInfo, error) {
files, err := b.SFTPClient.ReadDir(path)
return files, b.err(err)
}
func (b Sftp) Cat(path string) (io.Reader, error) {
remoteFile, err := b.SFTPClient.Open(path)
if err != nil {
return nil, b.err(err)
}
return remoteFile, nil
}
func (b Sftp) Mkdir(path string) error {
err := b.SFTPClient.Mkdir(path)
return b.err(err)
}
func (b Sftp) Rm(path string) error {
if IsDirectory(path) {
list, err := b.SFTPClient.ReadDir(path)
if err != nil {
return b.err(err)
}
for _, entry := range list {
p := path + entry.Name()
if entry.IsDir() {
p += "/"
err := b.Rm(p)
if err != nil {
return b.err(err)
}
} else {
err := b.SFTPClient.Remove(p)
if err != nil {
return b.err(err)
}
}
}
err = b.SFTPClient.RemoveDirectory(path)
if err != nil {
return b.err(err)
}
} else {
err := b.SFTPClient.Remove(path)
return b.err(err)
}
return nil
}
func (b Sftp) Mv(from string, to string) error {
err := b.SFTPClient.Rename(from, to)
return b.err(err)
}
func (b Sftp) Touch(path string) error {
file, err := b.SFTPClient.Create(path)
if err != nil {
return b.err(err)
}
_, err = file.ReadFrom(strings.NewReader(""))
return b.err(err)
}
func (b Sftp) Save(path string, file io.Reader) error {
remoteFile, err := b.SFTPClient.OpenFile(path, os.O_WRONLY|os.O_CREATE)
if err != nil {
return b.err(err)
}
_, err = remoteFile.ReadFrom(file)
return b.err(err)
}
func (b Sftp) Close() error {
err0 := b.SFTPClient.Close()
err1 := b.SSHClient.Close()
if err0 != nil {
return err0
}
return err1
}
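// err translates the numeric SFTP status codes defined by the protocol into user
// facing errors carrying an HTTP-style status.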
func (b Sftp) err(e error) error {
f, ok := e.(*sftp.StatusError)
if ok == false {
return e
}
switch f.Code {
case 0:
return nil
case 1:
return NewError("There's nothing more to see", 404)
case 2:
return NewError("Does not exist", 404)
case 3:
return NewError("Permission denied", 403)
case 4:
return NewError("Failure", 400)
case 5:
return NewError("Not Compatible", 400)
case 6:
return NewError("No Connection", 503)
case 7:
return NewError("Connection Lost", 503)
case 8:
return NewError("Operation not supported", 501)
case 9:
return NewError("Not valid", 400)
case 10:
return NewError("No such path", 404)
case 11:
return NewError("File already exists", 409)
case 12:
return NewError("Write protected", 403)
case 13:
return NewError("No media", 404)
case 14:
return NewError("No space left", 400)
case 15:
return NewError("Quota exceeded", 400)
case 16:
return NewError("Unknown", 400)
case 17:
return NewError("Lock conflict", 409)
case 18:
return NewError("Directory not empty", 400)
case 19:
return NewError("Not a directory", 400)
case 20:
return NewError("Invalid filename", 400)
case 21:
return NewError("Link loop", 508)
case 22:
return NewError("Cannot delete", 400)
case 23:
return NewError("Invalid query", 400)
case 24:
return NewError("File is a directory", 400)
case 25:
return NewError("Lock conflict", 409)
case 26:
return NewError("Lock refused", 400)
case 27:
return NewError("Delete pending", 400)
case 28:
return NewError("File corrupt", 400)
case 29:
return NewError("Invalid owner", 400)
case 30:
return NewError("Invalid group", 400)
case 31:
return NewError("Lock wasn't granted", 400)
default:
return NewError("Oops! Something went wrong", 500)
}
}


@ -1,89 +0,0 @@
var Client = require('ssh2-sftp-client');
const connections = {};
setInterval(() => {
for(let key in connections){
if(connections[key].date + (1000*120) < new Date().getTime()){
connections[key].conn.end();
delete connections[key];
}
}
}, 5000);
function connect(params){
if(connections[JSON.stringify(params)]){
connections[JSON.stringify(params)].date = new Date().getTime();
return Promise.resolve(connections[JSON.stringify(params)].conn);
}else{
let sftp = new Client();
let opts = {host: params.host, port: params.port || 22, username: params.username};
if(params.hasOwnProperty('private_key') && params['private_key']){
opts.privateKey = params['private_key']
}else{
opts.password = params['password'];
}
return sftp.connect(opts).then((res) => {
connections[JSON.stringify(params)] = {
date: new Date().getTime(),
conn: sftp
}
return Promise.resolve(sftp)
});
}
}
module.exports = {
test: function(params){
return connect(params)
.then(() => Promise.resolve(params))
},
cat: function(path, params){
return connect(params)
.then((sftp) => sftp.get(path, false, null));
},
ls: function(path, params){
return connect(params)
.then((sftp) => sftp.list(path))
.then((res) => Promise.resolve(res.map((file) => {
return {
type: function(type){
if(type === 'd'){
return 'directory'
}else if(type === 'l'){
return 'link';
}else if(type === '-'){
return 'file';
}else{
return 'unknown';
}
}(file.type),
name: file.name,
size: file.size,
time: file.modifyTime
};
})));
},
write: function(path, content, params){
return connect(params)
.then((sftp) => sftp.put(content, path))
},
rm: function(path, params){
return connect(params)
.then((sftp) => {
return sftp.delete(path)
.catch((err) => sftp.rmdir(path, true))
});
},
mv: function(from, to, params){
return connect(params)
.then((sftp) => sftp.rename(from, to));
},
mkdir: function(path, params){
return connect(params)
.then((sftp) => sftp.mkdir(path, false))
},
touch: function(path, params){
return connect(params)
.then((sftp) => sftp.put(Buffer.from(''), path))
}
}


@ -0,0 +1,206 @@
package backend
import (
"encoding/xml"
. "github.com/mickael-kerjean/nuage/server/common"
"io"
"net/http"
"net/url"
"os"
"path/filepath"
"regexp"
"strings"
"time"
)
type WebDav struct {
params *WebDavParams
}
type WebDavParams struct {
url string
username string
password string
path string
}
func NewWebDav(params map[string]string, app *App) (IBackend, error) {
params["url"] = regexp.MustCompile(`\/$`).ReplaceAllString(params["url"], "")
backend := WebDav{
params: &WebDavParams{
params["url"],
params["username"],
params["password"],
params["path"],
},
}
return backend, nil
}
func (w WebDav) Info() string {
return "webdav"
}
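// Ls issues a PROPFIND request and converts the multistatus XML answer into a list
// of files, skipping the entry that describes the queried folder itself.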
func (w WebDav) Ls(path string) ([]os.FileInfo, error) {
files := make([]os.FileInfo, 0)
query := `<d:propfind xmlns:d='DAV:'>
<d:prop>
<d:displayname/>
<d:resourcetype/>
<d:getlastmodified/>
</d:prop>
</d:propfind>`
res, err := w.request("PROPFIND", w.params.url+encodeURL(path), strings.NewReader(query), nil)
if err != nil {
return nil, err
}
defer res.Body.Close()
if res.StatusCode >= 400 {
return nil, NewError(HTTPFriendlyStatus(res.StatusCode)+": can't get things in "+filepath.Base(path), res.StatusCode)
}
var r WebDavResp
decoder := xml.NewDecoder(res.Body)
decoder.Decode(&r)
if len(r.Responses) == 0 {
return nil, NewError("Server not found", 404)
}
URLDav := regexp.MustCompile(`^http[s]?://[^/]*`).ReplaceAllString(w.params.url+encodeURL(path), "")
for _, tag := range r.Responses {
if tag.Href == URLDav {
continue
}
for i, prop := range tag.Props {
if i > 0 {
break
}
t, _ := time.Parse(time.RFC1123Z, prop.Modified)
files = append(files, File{
FName: func(p string) string {
name := filepath.Base(p)
name = decodeURL(name)
return name
}(tag.Href),
FType: func(p string) string {
if p == "collection" {
return "directory"
}
return "file"
}(prop.Type.Local),
FTime: t.UnixNano() / 1000,
FSize: int64(prop.Size),
})
}
}
return files, nil
}
func (w WebDav) Cat(path string) (io.Reader, error) {
res, err := w.request("GET", w.params.url+encodeURL(path), nil, nil)
if err != nil {
return nil, err
}
if res.StatusCode >= 400 {
return nil, NewError(HTTPFriendlyStatus(res.StatusCode)+": can't read "+filepath.Base(path), res.StatusCode)
}
return res.Body, nil
}
func (w WebDav) Mkdir(path string) error {
res, err := w.request("MKCOL", w.params.url+encodeURL(path), nil, func(req *http.Request) {
req.Header.Add("Overwrite", "F")
})
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't create "+filepath.Base(path), res.StatusCode)
}
return nil
}
func (w WebDav) Rm(path string) error {
res, err := w.request("DELETE", w.params.url+encodeURL(path), nil, nil)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't remove "+filepath.Base(path), res.StatusCode)
}
return nil
}
func (w WebDav) Mv(from string, to string) error {
res, err := w.request("MOVE", w.params.url+encodeURL(from), nil, func(req *http.Request) {
req.Header.Add("Destination", w.params.url+encodeURL(to))
req.Header.Add("Overwrite", "T")
})
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't do that", res.StatusCode)
}
return nil
}
func (w WebDav) Touch(path string) error {
return w.Save(path, strings.NewReader(""))
}
func (w WebDav) Save(path string, file io.Reader) error {
res, err := w.request("PUT", w.params.url+encodeURL(path), file, nil)
if err != nil {
return err
}
res.Body.Close()
if res.StatusCode >= 400 {
return NewError(HTTPFriendlyStatus(res.StatusCode)+": can't do that", res.StatusCode)
}
return nil
}
func (w WebDav) request(method string, url string, body io.Reader, fn func(req *http.Request)) (*http.Response, error) {
req, err := http.NewRequest(method, url, body)
if err != nil {
return nil, err
}
if w.params.username != "" {
req.SetBasicAuth(w.params.username, w.params.password)
}
req.Header.Add("Content-Type", "text/xml;charset=UTF-8")
req.Header.Add("Accept", "application/xml,text/xml")
req.Header.Add("Accept-Charset", "utf-8")
if req.Body != nil {
defer req.Body.Close()
}
if fn != nil {
fn(req)
}
return HTTPClient.Do(req)
}
type WebDavResp struct {
Responses []struct {
Href string `xml:"href"`
Props []struct {
Name string `xml:"prop>displayname,omitempty"`
Type xml.Name `xml:"prop>resourcetype>collection,omitempty"`
Size int64 `xml:"prop>getcontentlength,omitempty"`
Modified string `xml:"prop>getlastmodified,omitempty"`
} `xml:"propstat"`
} `xml:"response"`
}
func encodeURL(path string) string {
p := url.PathEscape(path)
return strings.Replace(p, "%2F", "/", -1)
}
func decodeURL(path string) string {
str, err := url.PathUnescape(path)
if err != nil {
return path
}
return str
}


@ -1,86 +0,0 @@
var fs = require("webdav-fs");
var Readable = require('stream').Readable;
var toString = require('stream-to-string');
function connect(params){
return fs(
params.url,
params.username,
params.password
);
}
module.exports = {
test: function(params){
return new Promise((done, err) => {
connect(params).readFile('/', function(error, res){
if(error){ err(error); }
else{ done(params); }
});
});
},
cat: function(path, params){
return Promise.resolve(connect(params).createReadStream(path));
},
ls: function(path, params){
return new Promise((done, err) => {
connect(params).readdir(path, function(error, contents) {
if (!error) {
done(contents.map((content) => {
return {
name: content.name,
type: function(cont){
if(cont.isDirectory()){
return 'directory';
}else if(cont.isFile()){
return 'file'
}else{
return null;
}
}(content),
time: content.mtime,
size: content.size
}
}));
} else {
err(error);
}
}, 'stat');
});
},
write: function(path, content, params){
return Promise.resolve(content.pipe(connect(params).createWriteStream(path)));
},
rm: function(path, params){
return new Promise((done, err) => {
connect(params).unlink(path, function (error) {
if(error){ err(error); }
else{ done('ok'); }
});
});
},
mv: function(from, to, params){
return new Promise((done, err) => {
connect(params).rename(from, to, function (error) {
if(error){ err(error); }
else{ done('ok'); }
});
});
},
mkdir: function(path, params){
return new Promise((done, err) => {
connect(params).mkdir(path, function(error) {
if(error){ err(error); }
else{ done('done'); }
});
});
},
touch: function(path, params){
return new Promise((done, err) => {
connect(params).writeFile(path, '', function(error) {
if(error){ err(error); }
else{ done('done'); }
});
});
}
}

37
server/model/files.go Normal file

@ -0,0 +1,37 @@
package model
import (
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/model/backend"
)
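// NewBackend is the factory picking a storage implementation from the "type" entry of
// the connection map; unknown types fall back to the Nothing backend, which rejects
// every operation with a 401.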
func NewBackend(ctx *App, conn map[string]string) (IBackend, error) {
switch conn["type"] {
case "webdav":
return backend.NewWebDav(conn, ctx)
case "ftp":
return backend.NewFtp(conn, ctx)
case "sftp":
return backend.NewSftp(conn, ctx)
case "git":
return backend.NewGit(conn, ctx)
case "s3":
return backend.NewS3(conn, ctx)
case "dropbox":
return backend.NewDropbox(conn, ctx)
case "gdrive":
return backend.NewGDrive(conn, ctx)
default:
return backend.NewNothing(conn, ctx)
}
return nil, NewError("Invalid backend type", 501)
}
func GetHome(b IBackend) (string, error) {
obj, ok := b.(interface{ Home() (string, error) })
if ok == false {
_, err := b.Ls("/")
return "", err
}
return obj.Home()
}
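// Illustrative use of the NewBackend factory (parameter values are made up):
//   b, err := NewBackend(ctx, map[string]string{"type": "sftp", "hostname": "example.org", "username": "demo", "password": "secret"})
//   if err == nil {
//       files, _ := b.Ls("/")
//       _ = files
//   }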


@ -1,100 +0,0 @@
var backend = {
ftp: require('./backend/ftp'),
sftp: require('./backend/sftp'),
webdav: require('./backend/webdav'),
dropbox: require('./backend/dropbox'),
gdrive: require('./backend/gdrive'),
s3: require('./backend/s3'),
git: require('./backend/git')
};
exports.cat = function(path, params, res){
try{
if(backend[params.type] && typeof backend[params.type].cat === 'function'){
return backend[params.type].cat(path, params.payload, res);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.write = function(path, content, params){
try{
if(backend[params.type] && typeof backend[params.type].write === 'function'){
return backend[params.type].write(path, content, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.ls = function(path, params){
try{
if(backend[params.type] && typeof backend[params.type].ls === 'function'){
return backend[params.type].ls(path, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.mv = function(from, to, params){
try{
if(backend[params.type] && typeof backend[params.type].mv === 'function'){
return backend[params.type].mv(from, to, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.rm = function(path, params){
try{
if(backend[params.type] && typeof backend[params.type].rm === 'function'){
return backend[params.type].rm(path, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.mkdir = function(path, params){
try{
if(backend[params.type] && typeof backend[params.type].mkdir === 'function'){
return backend[params.type].mkdir(path, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.touch = function(path, params){
try{
if(backend[params.type] && typeof backend[params.type].touch === 'function'){
return backend[params.type].touch(path, params.payload);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
function error(message){
return new Promise((done, err) => {
err(message);
});
}

262
server/model/files_test.go Normal file

@ -0,0 +1,262 @@
package model
import (
"fmt"
. "github.com/mickael-kerjean/nuage/server/common"
"io/ioutil"
"os"
"strings"
"testing"
)
var app *App
func init() {
app = &App{}
app.Config = &Config{}
app.Config.Initialise()
app.Config.General.Host = "http://test"
app.Config.OAuthProvider.Dropbox.ClientID = ""
app.Config.OAuthProvider.GoogleDrive.ClientID = ""
}
func TestWebdav(t *testing.T) {
if os.Getenv("WEBDAV_URL") == "" {
fmt.Println("- skipped webdav")
return
}
b, err := NewBackend(&App{}, map[string]string{
"type": "webdav",
"url": os.Getenv("WEBDAV_URL"),
})
if err != nil {
t.Errorf("Can't create WebDav backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func TestFtp(t *testing.T) {
if os.Getenv("FTP_USERNAME") == "" || os.Getenv("FTP_PASSWORD") == "" {
fmt.Println("- skipped ftp")
return
}
b, err := NewBackend(&App{}, map[string]string{
"type": "ftp",
"hostname": "127.0.0.1",
"username": os.Getenv("FTP_USERNAME"),
"password": os.Getenv("FTP_PASSWORD"),
})
if err != nil {
t.Errorf("Can't create FTP backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
b.Rm("/tmp/")
}
func TestSFtp(t *testing.T) {
if os.Getenv("SFTP_USERNAME") == "" || os.Getenv("SFTP_PASSWORD") == "" {
fmt.Println("- skipped sftp")
return
}
b, err := NewBackend(&App{}, map[string]string{
"type": "sftp",
"hostname": "127.0.0.1",
"username": os.Getenv("SFTP_USERNAME"),
"password": os.Getenv("SFTP_PASSWORD"),
})
if err != nil {
t.Errorf("Can't create SFTP backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func TestGit(t *testing.T) {
if os.Getenv("GIT_USERNAME") == "" || os.Getenv("GIT_PASSWORD") == "" {
fmt.Println("- skipped git")
return
}
b, err := NewBackend(app, map[string]string{
"type": "git",
"repo": "https://github.com/mickael-kerjean/tmp",
"username": os.Getenv("GIT_EMAIL"),
"password": os.Getenv("GIT_PASSWORD"),
})
if err != nil {
t.Errorf("Can't create Git backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func TestS3(t *testing.T) {
if os.Getenv("S3_ID") == "" || os.Getenv("S3_SECRET") == "" {
fmt.Println("- skipped S3")
return
}
b, err := NewBackend(&App{}, map[string]string{
"type": "s3",
"access_key_id": os.Getenv("S3_ID"),
"secret_access_key": os.Getenv("S3_SECRET"),
"endpoint": os.Getenv("S3_ENDPOINT"),
})
if err != nil {
t.Errorf("Can't create S3 backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func TestDropbox(t *testing.T) {
if os.Getenv("DROPBOX_TOKEN") == "" {
fmt.Println("- skipped Dropbox")
return
}
b, err := NewBackend(app, map[string]string{
"type": "dropbox",
"bearer": os.Getenv("DROPBOX_TOKEN"),
})
if err != nil {
t.Errorf("Can't create a Dropbox backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func TestGoogleDrive(t *testing.T) {
if os.Getenv("GDRIVE_TOKEN") == "" {
fmt.Println("- skipped Google Drive")
return
}
b, err := NewBackend(app, map[string]string{
"type": "gdrive",
"expiry": "",
"token": os.Getenv("GDRIVE_TOKEN"),
})
if err != nil {
t.Errorf("Can't create a Google Drive backend")
}
setup(t, b)
suite(t, b)
tearDown(t, b)
}
func setup(t *testing.T, b IBackend) {
b.Rm("/tmp/test/")
b.Mkdir("/tmp/")
b.Mkdir("/tmp/test/")
}
func tearDown(t *testing.T, b IBackend) {
b.Rm("/tmp/test/")
}
func suite(t *testing.T, b IBackend) {
// create state
content := "lorem ipsum"
b.Mkdir("/tmp/test/trash/")
b.Touch("/tmp/test/test0.txt")
b.Save("/tmp/test/test0.txt", strings.NewReader(content))
b.Save("/tmp/test/test1.txt", strings.NewReader(content))
b.Touch("/tmp/test/test2.txt")
b.Mv("/tmp/test/test0.txt", "/tmp/test/trash/test0.txt")
// list all files
tmp0, err := b.Ls("/tmp/test/")
if err != nil {
t.Errorf("Ls error: %s", err)
return
}
if len(tmp0) != 3 {
t.Errorf("LS error: got: %d elmnt, want: %d", len(tmp0), 3)
return
}
// read file
tmp1, err := b.Cat("/tmp/test/trash/test0.txt")
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
tmp2, err := ioutil.ReadAll(tmp1)
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
if string(tmp2) != content {
t.Errorf("Incorrect file: %s, want: %s.", tmp2, content)
return
}
if obj, ok := tmp1.(interface{ Close() error }); ok {
obj.Close()
}
tmp1, err = b.Cat("/tmp/test/test1.txt")
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
tmp2, err = ioutil.ReadAll(tmp1)
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
if string(tmp2) != content {
t.Errorf("Incorrect file: %s, want: %s.", tmp2, content)
return
}
if obj, ok := tmp1.(interface{ Close() error }); ok {
obj.Close()
}
tmp1, err = b.Cat("/tmp/test/test2.txt")
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
tmp2, err = ioutil.ReadAll(tmp1)
if err != nil {
t.Errorf("Cat error: %s", err)
return
}
if string(tmp2) != "" {
t.Errorf("Incorrect file: %s, want: %s.", tmp2, "")
return
}
if obj, ok := tmp1.(interface{ Close() error }); ok {
obj.Close()
}
// remove file
b.Rm("/tmp/test/test2.txt")
tmp0, err = b.Ls("/tmp/test/")
if len(tmp0) != 2 {
t.Errorf("Test folder elements, got: %d, want: %d.", len(tmp0), 2)
return
}
tmp0, err = b.Ls("/tmp/test/")
if err != nil {
t.Errorf("Ls error %s", err)
return
}
if len(tmp0) != 2 {
t.Errorf("LS error: got: %d elmnt, want: %d", len(tmp0), 2)
return
}
// remove folder
b.Rm("/tmp/test/")
tmp0, err = b.Ls("/tmp/test/")
if err == nil {
t.Errorf("Removed folder still exists: %d", len(tmp0))
return
}
}

View file

@ -1,39 +0,0 @@
var backend = {
ftp: require('./backend/ftp'),
sftp: require('./backend/sftp'),
webdav: require('./backend/webdav'),
dropbox: require('./backend/dropbox'),
gdrive: require('./backend/gdrive'),
s3: require('./backend/s3'),
git: require('./backend/git')
};
exports.test = function(params){
try{
if(backend[params.type] && typeof backend[params.type].test === 'function'){
return backend[params.type].test(params);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
exports.auth = function(params){
try{
if(backend[params.type] && typeof backend[params.type].auth === 'function'){
return backend[params.type].auth(params);
}else{
return error('not implemented');
}
}catch(err){
return error(err);
}
}
function error(message){
return new Promise((done, err) => {
err(message);
});
}

16
server/router/config.go Normal file
View file

@ -0,0 +1,16 @@
package router
import (
. "github.com/mickael-kerjean/nuage/server/common"
"net/http"
)
func ConfigHandler(ctx App, res http.ResponseWriter, req *http.Request) {
c, err := ctx.Config.Export()
if err != nil {
sendErrorResult(res, err)
return
}
res.Write([]byte("window.CONFIG = "))
res.Write([]byte(c))
}

195
server/router/files.go Normal file
View file

@ -0,0 +1,195 @@
package router
import (
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/services"
"io"
"net/http"
"path/filepath"
"strings"
"time"
)
type FileInfo struct {
Name string `json:"name"`
Type string `json:"type"`
Size int64 `json:"size"`
Time int64 `json:"time"`
}
func FileLs(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
entries, err := ctx.Backend.Ls(path)
if err != nil {
sendErrorResult(res, err)
return
}
files := []FileInfo{}
for _, entry := range entries {
f := FileInfo{
Name: entry.Name(),
Size: entry.Size(),
Time: func(t time.Time) int64 {
return t.UnixNano() / int64(time.Millisecond)
}(entry.ModTime()),
Type: func(isDir bool) string {
if isDir == true {
return "directory"
}
return "file"
}(entry.IsDir()),
}
files = append(files, f)
}
var perms *Metadata
if obj, ok := ctx.Backend.(interface{ Meta(path string) *Metadata }); ok {
perms = obj.Meta(path)
}
sendSuccessResultsWithMetadata(res, files, perms)
}
func FileCat(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
file, err := ctx.Backend.Cat(path)
if err != nil {
sendErrorResult(res, err)
return
}
if obj, ok := file.(interface{ Close() error }); ok {
defer obj.Close()
}
http.SetCookie(res, &http.Cookie{
Name: "download",
Value: "",
MaxAge: -1,
Path: "/",
})
file, err = services.ProcessFileBeforeSend(file, &ctx, req, &res)
if err != nil {
sendErrorResult(res, err)
return
}
io.Copy(res, file)
}
func FileSave(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
file, _, err := req.FormFile("file")
if err != nil {
sendErrorResult(res, err)
return
}
defer file.Close()
err = ctx.Backend.Save(path, file)
if obj, ok := file.(interface{ Close() error }); ok {
obj.Close()
}
if err != nil {
sendErrorResult(res, NewError(err.Error(), 403))
return
}
sendSuccessResult(res, nil)
}
func FileMv(ctx App, res http.ResponseWriter, req *http.Request) {
from, err := pathBuilder(ctx, req.URL.Query().Get("from"))
if err != nil {
sendErrorResult(res, err)
return
}
to, err := pathBuilder(ctx, req.URL.Query().Get("to"))
if err != nil {
sendErrorResult(res, err)
return
}
if from == "" || to == "" {
sendErrorResult(res, NewError("missing path parameter", 400))
return
}
err = ctx.Backend.Mv(from, to)
if err != nil {
sendErrorResult(res, err)
return
}
sendSuccessResult(res, nil)
}
func FileRm(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
err = ctx.Backend.Rm(path)
if err != nil {
sendErrorResult(res, err)
return
}
sendSuccessResult(res, nil)
}
func FileMkdir(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
err = ctx.Backend.Mkdir(path)
if err != nil {
sendErrorResult(res, err)
return
}
sendSuccessResult(res, nil)
}
func FileTouch(ctx App, res http.ResponseWriter, req *http.Request) {
path, err := pathBuilder(ctx, req.URL.Query().Get("path"))
if err != nil {
sendErrorResult(res, err)
return
}
err = ctx.Backend.Touch(path)
if err != nil {
sendErrorResult(res, err)
return
}
sendSuccessResult(res, nil)
}
func pathBuilder(ctx App, path string) (string, error) {
if path == "" {
return "", NewError("No path available", 400)
}
basePath := ctx.Session["path"]
basePath = filepath.Join(basePath, path)
if string(path[len(path)-1]) == "/" && basePath != "/" {
basePath += "/"
}
if strings.HasPrefix(basePath, ctx.Session["path"]) == false {
return "", NewError("There's nothing here", 403)
}
return basePath, nil
}
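
pathBuilder is the gatekeeper between the request's path parameter and the backend: it joins the parameter onto the path stored in the session and refuses anything that escapes that root. A rough sketch of the expected behaviour, not part of the codebase, assuming a session whose "path" entry is "/home/demo/" (all values made up for illustration):

// Sketch only: App.Session is the map[string]string filled in by extractSession.
ctx := App{Session: map[string]string{"path": "/home/demo/"}}
if p, err := pathBuilder(ctx, "/documents/"); err == nil {
	fmt.Println(p) // /home/demo/documents/
}
if _, err := pathBuilder(ctx, "/../../etc/passwd"); err != nil {
	fmt.Println(err) // "There's nothing here" (403): the cleaned path "/etc/passwd" leaves the session root
}
if _, err := pathBuilder(ctx, ""); err != nil {
	fmt.Println(err) // "No path available" (400)
}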

44
server/router/index.go Normal file
View file

@ -0,0 +1,44 @@
package router
import (
"github.com/mickael-kerjean/mux"
. "github.com/mickael-kerjean/nuage/server/common"
"log"
"net/http"
"strconv"
)
func Init(a *App) *http.Server {
r := mux.NewRouter()
session := r.PathPrefix("/api/session").Subrouter()
session.HandleFunc("", APIHandler(SessionIsValid, *a)).Methods("GET")
session.HandleFunc("", APIHandler(SessionAuthenticate, *a)).Methods("POST")
session.HandleFunc("", APIHandler(SessionLogout, *a)).Methods("DELETE")
session.Handle("/auth/{service}", APIHandler(SessionOAuthBackend, *a)).Methods("GET")
files := r.PathPrefix("/api/files").Subrouter()
files.HandleFunc("/ls", APIHandler(LoggedInOnly(FileLs), *a)).Methods("GET")
files.HandleFunc("/cat", APIHandler(LoggedInOnly(FileCat), *a)).Methods("GET")
files.HandleFunc("/cat", APIHandler(LoggedInOnly(FileSave), *a)).Methods("POST")
files.HandleFunc("/mv", APIHandler(LoggedInOnly(FileMv), *a)).Methods("GET")
files.HandleFunc("/rm", APIHandler(LoggedInOnly(FileRm), *a)).Methods("GET")
files.HandleFunc("/mkdir", APIHandler(LoggedInOnly(FileMkdir), *a)).Methods("GET")
files.HandleFunc("/touch", APIHandler(LoggedInOnly(FileTouch), *a)).Methods("GET")
r.HandleFunc("/api/config", CtxInjector(ConfigHandler, *a))
r.PathPrefix("/assets").Handler(StaticHandler("./data/public/", *a))
r.NotFoundHandler = IndexHandler("./data/public/index.html", *a)
srv := &http.Server{
Addr: ":" + strconv.Itoa(a.Config.General.Port),
Handler: r,
}
go func() {
if err := srv.ListenAndServe(); err != nil {
log.Fatal(err)
}
}()
return srv
}

179
server/router/middleware.go Normal file
View file

@ -0,0 +1,179 @@
package router
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"encoding/base64"
"encoding/json"
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/model"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"strings"
"time"
)
func APIHandler(fn func(App, http.ResponseWriter, *http.Request), ctx App) http.HandlerFunc {
return func(res http.ResponseWriter, req *http.Request) {
start := time.Now()
ctx.Body, _ = extractBody(req)
ctx.Session, _ = extractSession(req, &ctx)
ctx.Backend, _ = extractBackend(req, &ctx)
res.Header().Add("Content-Type", "application/json")
resw := ResponseWriter{ResponseWriter: res}
fn(ctx, &resw, req)
req.Body.Close()
if ctx.Config.Log.Telemetry {
go telemetry(req, &resw, start, ctx.Backend.Info())
}
if ctx.Config.Log.Enable {
go logger(req, &resw, start)
}
}
}
func LoggedInOnly(fn func(App, http.ResponseWriter, *http.Request)) func(ctx App, res http.ResponseWriter, req *http.Request) {
return func(ctx App, res http.ResponseWriter, req *http.Request) {
if ctx.Backend == nil || ctx.Session == nil {
sendErrorResult(res, NewError("Forbidden", 403))
return
}
fn(ctx, res, req)
}
}
func CtxInjector(fn func(App, http.ResponseWriter, *http.Request), ctx App) http.HandlerFunc {
return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
fn(ctx, res, req)
})
}
func extractBody(req *http.Request) (map[string]string, error) {
var body map[string]string
if strings.HasPrefix(req.Header.Get("Content-Type"), "multipart/form-data") {
return body, NewError("", 200)
}
byt, err := ioutil.ReadAll(req.Body)
if err != nil {
return nil, err
}
if err := json.Unmarshal(byt, &body); err != nil {
return nil, err
}
return body, nil
}
func extractSession(req *http.Request, ctx *App) (map[string]string, error) {
cookie, err := req.Cookie(COOKIE_NAME)
if err != nil {
return make(map[string]string), err
}
return decrypt(ctx.Config.General.SecretKey, cookie.Value)
}
func extractBackend(req *http.Request, ctx *App) (IBackend, error) {
return model.NewBackend(ctx, ctx.Session)
}
func telemetry(req *http.Request, res *ResponseWriter, start time.Time, backendType string) {
if os.Getenv("ENV") != "dev" {
point := logPoint(req, res, start, backendType)
body, err := json.Marshal(point)
if err != nil {
return
}
formData := bytes.NewReader(body)
r, _ := http.NewRequest("POST", "https://log.kerjean.me/nuage", formData)
r.Header.Set("Content-Type", "application/json")
HTTP.Do(r)
}
}
func logger(req *http.Request, res *ResponseWriter, start time.Time) {
point := logPoint(req, res, start, "")
log.Printf("%s %d %d %s %s\n", "INFO", point.Duration, point.Status, point.Method, point.RequestURI)
}
func logPoint(req *http.Request, res *ResponseWriter, start time.Time, backendType string) *LogEntry {
return &LogEntry{
Version: APP_VERSION,
Scheme: req.URL.Scheme,
Host: req.Host,
Method: req.Method,
RequestURI: req.RequestURI,
Proto: req.Proto,
Status: res.status,
UserAgent: req.Header.Get("User-Agent"),
Ip: req.RemoteAddr,
Referer: req.Referer(),
Duration: int64(time.Now().Sub(start) / (1000 * 1000)),
Timestamp: time.Now().UTC(),
Backend: backendType,
}
}
func encrypt(keystr string, text map[string]string) (string, error) {
key := []byte(keystr)
plaintext, err := json.Marshal(text)
if err != nil {
return "", NewError("json marshalling: "+err.Error(), 500)
}
block, err := aes.NewCipher(key)
if err != nil {
return "", NewError("encryption issue (cipher): "+err.Error(), 500)
}
ciphertext := make([]byte, aes.BlockSize+len(plaintext))
iv := ciphertext[:aes.BlockSize]
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
return "", NewError("encryption issue: "+err.Error(), 500)
}
stream := cipher.NewCFBEncrypter(block, iv)
stream.XORKeyStream(ciphertext[aes.BlockSize:], plaintext)
return base64.URLEncoding.EncodeToString(ciphertext), nil
}
func decrypt(keystr string, cryptoText string) (map[string]string, error) {
var raw map[string]string
key := []byte(keystr)
ciphertext, _ := base64.URLEncoding.DecodeString(cryptoText)
block, err := aes.NewCipher(key)
if err != nil || len(ciphertext) < aes.BlockSize {
return raw, NewError("Cipher is too short", 500)
}
iv := ciphertext[:aes.BlockSize]
ciphertext = ciphertext[aes.BlockSize:]
stream := cipher.NewCFBDecrypter(block, iv)
stream.XORKeyStream(ciphertext, ciphertext)
json.Unmarshal(ciphertext, &raw)
return raw, nil
}
type ResponseWriter struct {
http.ResponseWriter
status int
}
func (w *ResponseWriter) WriteHeader(status int) {
w.status = status
w.ResponseWriter.WriteHeader(status)
}
func (w *ResponseWriter) Write(b []byte) (int, error) {
if w.status == 0 {
w.status = 200
}
return w.ResponseWriter.Write(b)
}
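
The auth cookie is nothing more than the connection parameters from the login form, serialised to JSON and AES-CFB encrypted with the instance secret. A minimal round-trip through the two helpers above, assuming General.SecretKey has a valid AES key length (16, 24 or 32 bytes); the key and parameters below are placeholders:

// Sketch only.
key := "0123456789abcdef" // placeholder 16-byte key; aes.NewCipher also accepts 24 or 32 bytes
session := map[string]string{"type": "webdav", "url": "https://example.org/dav/"}
cookieValue, err := encrypt(key, session)
if err != nil {
	log.Fatal(err)
}
// cookieValue is what SessionAuthenticate stores in the "auth" cookie;
// decrypt reverses it on every request in extractSession.
restored, _ := decrypt(key, cookieValue)
log.Println(restored["type"]) // webdav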

115
server/router/session.go Normal file
View file

@ -0,0 +1,115 @@
package router
import (
"errors"
"github.com/mickael-kerjean/mux"
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/model"
"net/http"
"time"
)
const (
COOKIE_NAME = "auth"
COOKIE_PATH = "/api"
)
func SessionIsValid(ctx App, res http.ResponseWriter, req *http.Request) {
if ctx.Backend == nil {
sendSuccessResult(res, false)
return
}
if _, err := ctx.Backend.Ls("/"); err != nil {
sendSuccessResult(res, false)
return
}
home, _ := model.GetHome(ctx.Backend)
if home == "" {
sendSuccessResult(res, true)
return
}
sendSuccessResult(res, true)
}
func SessionAuthenticate(ctx App, res http.ResponseWriter, req *http.Request) {
ctx.Body["timestamp"] = time.Now().String()
backend, err := model.NewBackend(&ctx, ctx.Body)
if err != nil {
sendErrorResult(res, err)
return
}
if obj, ok := backend.(interface {
OAuthToken(*map[string]string) error
}); ok {
err := obj.OAuthToken(&ctx.Body)
if err != nil {
sendErrorResult(res, NewError("Can't authenticate (OAuth error)", 401))
return
}
backend, err = model.NewBackend(&ctx, ctx.Body)
if err != nil {
sendErrorResult(res, NewError("Can't authenticate", 401))
return
}
}
home, err := model.GetHome(backend)
if err != nil {
sendErrorResult(res, err)
return
}
obfuscate, err := encrypt(ctx.Config.General.SecretKey, ctx.Body)
if err != nil {
sendErrorResult(res, NewError(err.Error(), 500))
return
}
cookie := http.Cookie{
Name: COOKIE_NAME,
Value: obfuscate,
MaxAge: 60 * 60 * 24 * 30,
Path: COOKIE_PATH,
HttpOnly: true,
}
http.SetCookie(res, &cookie)
if home == "" {
sendSuccessResult(res, nil)
} else {
sendSuccessResult(res, home)
}
}
func SessionLogout(ctx App, res http.ResponseWriter, req *http.Request) {
cookie := http.Cookie{
Name: COOKIE_NAME,
Value: "",
Path: COOKIE_PATH,
MaxAge: -1,
}
if ctx.Backend != nil {
if obj, ok := ctx.Backend.(interface{ Close() error }); ok {
go obj.Close()
}
}
http.SetCookie(res, &cookie)
sendSuccessResult(res, nil)
}
func SessionOAuthBackend(ctx App, res http.ResponseWriter, req *http.Request) {
vars := mux.Vars(req)
a := map[string]string{
"type": vars["service"],
}
b, err := model.NewBackend(&ctx, a)
if err != nil {
sendErrorResult(res, err)
return
}
obj, ok := b.(interface{ OAuthURL() string })
if ok == false {
sendErrorResult(res, errors.New("No backend authentication"))
return
}
sendSuccessResult(res, obj.OAuthURL())
}

42
server/router/static.go Normal file
View file

@ -0,0 +1,42 @@
package router
import (
. "github.com/mickael-kerjean/nuage/server/common"
"mime"
"net/http"
"os"
"path"
"path/filepath"
"strings"
)
func StaticHandler(_path string, ctx App) http.Handler {
return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
absPath := ctx.Helpers.AbsolutePath(_path)
fsrv := http.FileServer(http.Dir(absPath))
_, err := os.Open(path.Join(absPath, req.URL.Path+".gz"))
mType := mime.TypeByExtension(filepath.Ext(req.URL.Path))
res.Header().Set("Content-Type", mType)
if err == nil && strings.Contains(req.Header.Get("Accept-Encoding"), "gzip") {
res.Header().Set("Content-Encoding", "gzip")
req.URL.Path += ".gz"
}
res.Header().Set("Cache-Control", "max-age=2592000")
fsrv.ServeHTTP(res, req)
})
}
func IndexHandler(_path string, ctx App) http.Handler {
return http.HandlerFunc(func(res http.ResponseWriter, req *http.Request) {
res.Header().Set("Content-Type", "text/html")
p := _path
if _, err := os.Open(path.Join(ctx.Config.Runtime.Dirname, p+".gz")); err == nil && strings.Contains(req.Header.Get("Accept-Encoding"), "gzip") {
res.Header().Set("Content-Encoding", "gzip")
p += ".gz"
}
http.ServeFile(res, req, ctx.Helpers.AbsolutePath(p))
})
}

62
server/router/utils.go Normal file
View file

@ -0,0 +1,62 @@
package router
import (
"encoding/json"
"net/http"
"strings"
)
type APISuccessResult struct {
Status string `json:"status"`
Result interface{} `json:"result,omitempty"`
}
type APISuccessResults struct {
Status string `json:"status"`
Results interface{} `json:"results"`
}
type APISuccessResultsWithMetadata struct {
Status string `json:"status"`
Results interface{} `json:"results"`
Metadata interface{} `json:"metadata,omitempty"`
}
type APIErrorMessage struct {
Status string `json:"status"`
Message string `json:"message,omitempty"`
}
func sendSuccessResult(res http.ResponseWriter, data interface{}) {
encoder := json.NewEncoder(res)
encoder.SetEscapeHTML(false)
encoder.Encode(APISuccessResult{"ok", data})
}
func sendSuccessResults(res http.ResponseWriter, data interface{}) {
encoder := json.NewEncoder(res)
encoder.SetEscapeHTML(false)
encoder.Encode(APISuccessResults{"ok", data})
}
func sendSuccessResultsWithMetadata(res http.ResponseWriter, data interface{}, p interface{}) {
encoder := json.NewEncoder(res)
encoder.SetEscapeHTML(false)
encoder.Encode(APISuccessResultsWithMetadata{"ok", data, p})
}
func sendErrorResult(res http.ResponseWriter, err error) {
encoder := json.NewEncoder(res)
encoder.SetEscapeHTML(false)
obj, ok := err.(interface{ Status() int })
if ok == true {
res.WriteHeader(obj.Status())
}
m := func(r string) string {
if r == "" {
return r
}
return strings.ToUpper(string(r[0])) + string(r[1:])
}(err.Error())
encoder.Encode(APIErrorMessage{"error", m})
}
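
For reference, these helpers produce flat JSON envelopes. A quick sketch using the standard library's httptest recorder (assuming NewError from the common package is in scope, as in the other router files):

// Sketch only.
rec := httptest.NewRecorder()
sendSuccessResult(rec, "/home/demo/")
fmt.Print(rec.Body.String()) // {"status":"ok","result":"/home/demo/"}

rec = httptest.NewRecorder()
sendErrorResult(rec, NewError("not implemented", 501))
// status code 501, body: {"status":"error","message":"Not implemented"}
fmt.Print(rec.Body.String())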

View file

@ -0,0 +1,67 @@
#include <stdlib.h>
#include <libraw/libraw.h>
int save_thumbnail(const char *filename, libraw_data_t *raw){
int err;
err = libraw_dcraw_thumb_writer(raw, filename);
libraw_close(raw);
return err;
}
int raw_process(const char* filename, int min_width){
int err;
libraw_data_t *raw;
int thumbnail_working = 0;
//////////////////////
// boot up libraw
raw = libraw_init(0);
if(libraw_open_file(raw, filename) != 0){
libraw_close(raw);
return 1;
}
raw->params.output_tiff = 1;
//////////////////////
// use thumbnail if available
if(libraw_unpack_thumb(raw) == 0){
thumbnail_working = 1;
if(raw->thumbnail.twidth > min_width && raw->thumbnail.tformat == LIBRAW_THUMBNAIL_JPEG){
return save_thumbnail(filename, raw);
}
}
//////////////////////
// transcode image
if(libraw_unpack(raw) != 0){
if(thumbnail_working == 1){
return save_thumbnail(filename, raw);
}
libraw_close(raw);
return 1;
}
err = libraw_dcraw_process(raw);
if(err != 0){
if(err == LIBRAW_UNSUFFICIENT_MEMORY){
libraw_close(raw);
return -1;
}
if(thumbnail_working == 1){
return save_thumbnail(filename, raw);
}
libraw_close(raw);
return 1;
}
if(libraw_dcraw_ppm_tiff_writer(raw, filename) != 0){
if(thumbnail_working == 1){
return save_thumbnail(filename, raw);
}
libraw_close(raw);
return 1;
}
libraw_close(raw);
return 0;
}

View file

@ -0,0 +1,65 @@
package images
// #cgo pkg-config: libraw
// #include <raw.h>
// #include <stdlib.h>
import "C"
import (
. "github.com/mickael-kerjean/nuage/server/common"
"math/rand"
"time"
"unsafe"
)
const LIBRAW_MEMORY_ERROR = -1
func IsRaw(mType string) bool {
switch mType {
case "image/x-tif":
case "image/x-canon-cr2":
case "image/x-canon-crw":
case "image/x-nikon-nef":
case "image/x-nikon-nrw":
case "image/x-sony-arw":
case "image/x-sony-sr2":
case "image/x-minolta-mrw":
case "image/x-minolta-mdc":
case "image/x-olympus-orf":
case "image/x-panasonic-rw2":
case "image/x-pentax-pef":
case "image/x-epson-erf":
case "image/x-raw":
case "image/x-x3f":
case "image/x-fuji-raf":
case "image/x-aptus-mos":
case "image/x-mamiya-mef":
case "image/x-hasselblad-3fr":
case "image/x-adobe-dng":
case "image/x-samsung-srw":
case "image/x-kodak-kdc":
case "image/x-kodak-dcr":
default:
return false
}
return true
}
func ExtractPreview(t *Transform) error {
filename := C.CString(t.Temporary)
err := C.raw_process(filename, C.int(t.Size))
if err == LIBRAW_MEMORY_ERROR {
// libraw acts weird sometimes and I couldn't
// find a way to increase its available memory :(
r := rand.Intn(2000) + 500
time.Sleep(time.Duration(r) * time.Millisecond)
C.free(unsafe.Pointer(filename))
return ExtractPreview(t)
} else if err != 0 {
C.free(unsafe.Pointer(filename))
return NewError("", 500)
}
C.free(unsafe.Pointer(filename))
return nil
}

View file

@ -0,0 +1,4 @@
#include <stdlib.h>
#include <libraw/libraw.h>
int raw_process(const char* filename, int min_width);

View file

@ -0,0 +1,36 @@
#include <stdlib.h>
#include <vips/vips.h>
int resizer_init(const int ncpu, const int cache_max, const int cache_mem){
if(VIPS_INIT("nuage")){
return 1;
}
vips_concurrency_set(ncpu);
vips_cache_set_max(cache_max);
vips_cache_set_max_mem(cache_mem);
return 0;
}
int resizer_process(const char *filename, void **buf, size_t *len, int size, int crop, int quality, int exif){
VipsImage *img;
int err;
size = size > 4000 || size < 0 ? 1000 : size;
crop = crop == 0 ? VIPS_INTERESTING_NONE : VIPS_INTERESTING_CENTRE;
quality = quality > 100 || quality < 0 ? 80 : quality;
exif = exif == 0 ? TRUE : FALSE;
err = vips_thumbnail(filename, &img, size,
"size", VIPS_SIZE_DOWN,
"auto_rotate", TRUE,
"crop", crop,
NULL
);
if(err != 0){
return err;
}
err = vips_jpegsave_buffer(img, buf, len, "Q", quality, "strip", exif, NULL);
g_object_unref(img);
return err;
}

View file

@ -0,0 +1,57 @@
package images
// #cgo pkg-config: vips
// #include <resizer.h>
// #include <stdlib.h>
import "C"
import (
"bytes"
. "github.com/mickael-kerjean/nuage/server/common"
"io"
"log"
"runtime"
"unsafe"
)
var LIBVIPS_INSTALLED = false
type Transform struct {
Temporary string
Size int
Crop bool
Quality int
Exif bool
}
func init() {
if C.resizer_init(C.int(runtime.NumCPU()), 50, 1024) != 0 {
log.Println("WARNING Can't load libvips")
return
}
LIBVIPS_INSTALLED = true
}
func CreateThumbnail(t *Transform) (io.Reader, error) {
if LIBVIPS_INSTALLED == false {
return nil, NewError("Libvips not installed", 501)
}
filename := C.CString(t.Temporary)
defer C.free(unsafe.Pointer(filename))
var buffer unsafe.Pointer
len := C.size_t(0)
if C.resizer_process(filename, &buffer, &len, C.int(t.Size), boolToCInt(t.Crop), C.int(t.Quality), boolToCInt(t.Exif)) != 0 {
return nil, NewError("", 500)
}
buf := C.GoBytes(buffer, C.int(len))
C.g_free(C.gpointer(buffer))
return bytes.NewReader(buf), nil
}
func boolToCInt(val bool) C.int {
if val == false {
return C.int(0)
}
return C.int(1)
}
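
CreateThumbnail expects the image to already sit on disk (Transform.Temporary) and hands back the resized JPEG as an in-memory reader; ProcessFileBeforeSend below does the real wiring. A hedged usage sketch with a made-up path, mirroring the thumbnail settings used there:

// Sketch only: the path is hypothetical and the file must already exist.
t := &Transform{
	Temporary: "/tmp/nuage_example.jpg",
	Size:      300,
	Crop:      true,  // centre crop
	Quality:   50,
	Exif:      false, // EXIF metadata gets stripped
}
if reader, err := CreateThumbnail(t); err != nil {
	log.Println(err) // e.g. 501 when libvips could not be loaded
} else {
	thumb, _ := ioutil.ReadAll(reader) // ioutil would need importing here
	log.Printf("thumbnail: %d bytes", len(thumb))
}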

View file

@ -0,0 +1,6 @@
#include <stdlib.h>
#include <vips/vips.h>
int resizer_init(const int ncpu, const int cache_max, const int cache_mem);
int resizer_process(const char *filename, void **buf, size_t *len, int size, int crop, int quality, int exif);

View file

@ -0,0 +1,92 @@
package services
import (
. "github.com/mickael-kerjean/nuage/server/common"
"github.com/mickael-kerjean/nuage/server/services/images"
"io"
"net/http"
"os"
"path/filepath"
"strconv"
"strings"
)
const (
ImageCachePath = "data/cache/image/"
)
func init() {
cachePath := filepath.Join(GetCurrentDir(), ImageCachePath)
os.RemoveAll(cachePath)
os.MkdirAll(cachePath, os.ModePerm)
}
func ProcessFileBeforeSend(reader io.Reader, ctx *App, req *http.Request, res *http.ResponseWriter) (io.Reader, error) {
query := req.URL.Query()
mType := ctx.Helpers.MimeType(query.Get("path"))
(*res).Header().Set("Content-Type", mType)
if strings.HasPrefix(mType, "image/") {
if query.Get("thumbnail") != "true" && query.Get("size") == "" {
return reader, nil
}
/////////////////////////
// Specify transformation
transform := &images.Transform{
Temporary: ctx.Helpers.AbsolutePath(ImageCachePath + "image_" + RandomString(10)),
Size: 300,
Crop: true,
Quality: 50,
Exif: false,
}
if query.Get("thumbnail") == "true" {
(*res).Header().Set("Cache-Control", "max-age=259200")
} else if query.Get("size") != "" {
(*res).Header().Set("Cache-Control", "max-age=600")
size, err := strconv.ParseInt(query.Get("size"), 10, 64)
if err != nil {
return reader, nil
}
transform.Size = int(size)
transform.Crop = false
transform.Quality = 90
transform.Exif = true
}
/////////////////////////////
// Insert file in the fs
// => lower RAM usage while processing
file, err := os.OpenFile(transform.Temporary, os.O_WRONLY|os.O_CREATE, os.ModePerm)
if err != nil {
return reader, NewError("Can't use filesystem", 500)
}
io.Copy(file, reader)
file.Close()
if obj, ok := reader.(interface{ Close() error }); ok {
obj.Close()
}
defer func() {
os.Remove(transform.Temporary)
}()
/////////////////////////
// Transcode RAW image
if images.IsRaw(mType) {
if images.ExtractPreview(transform) == nil {
mType = "image/jpeg"
(*res).Header().Set("Content-Type", mType)
} else {
// reader has already been drained into the temp file above; reopen it instead of returning an empty stream
return os.Open(transform.Temporary)
}
}
/////////////////////////
// Final stage: resizing
if mType != "image/jpeg" && mType != "image/png" && mType != "image/gif" && mType != "image/tiff" {
// same here: the original bytes now live in the temp file, not in reader
return os.Open(transform.Temporary)
}
return images.CreateThumbnail(transform)
}
return reader, nil
}

View file

@ -1,26 +0,0 @@
module.exports = function(EXPIRE, REFRESH = 60000){
let conn = {};
setInterval(() => {
for(let key in conn){
if(conn[key] && conn[key].date + EXPIRE * 1000 > new Date().getTime()){
file.rm(key).then(() => delete conn[key])
}
}
}, REFRESH);
return {
get: function(key){
if(conn[key] && new Date().getTime() > conn[key].date + CACHE_TIMEOUT * 1000){
return conn[key].data;
}
return null;
},
put: function(key, data){
conn[key] = {
date: new Date(),
data: data
};
}
}
}

View file

@ -1,26 +0,0 @@
const crypto = require('crypto'),
algorithm = 'aes-256-cbc',
password = require('../../config_server')['secret_key'];
module.exports = {
encrypt: function(obj){
obj.date = new Date().getTime();
const text = JSON.stringify(obj);
const cipher = crypto.createCipher(algorithm, password);
let crypted = cipher.update(text, 'utf8', 'base64');
crypted += cipher.final('base64');
return crypted;
},
decrypt: function(text){
var dec;
try{
const decipher = crypto.createDecipher(algorithm, password);
dec = decipher.update(text, 'base64', 'utf8');
dec += decipher.final('utf8');
dec = JSON.parse(dec);
}catch(err){
dec = null;
}
return dec;
}
}