package http
import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
	"html/template"
	"io/ioutil"
	"net/http"
	"strings"

	"git.darknebu.la/emile/faila/src/structs"
	"github.com/gorilla/mux"
	"github.com/sirupsen/logrus"
	"github.com/spf13/viper"
)
// Server defines and runs an HTTP server.
//
// It wires up the asset file server, the download / view handlers and the
// catch-all path handler, then listens on the address configured under
// server.bindip / server.listenport.
func Server() {
	router := mux.NewRouter()
	router.Use(loggingMiddleware)

	// serve the static js / css assets from ./hosted/assets
	staticFS := http.FileServer(http.Dir("./hosted/assets"))
	assetRouter := router.PathPrefix("/assets/").Subrouter()
	assetRouter.PathPrefix("/").Handler(http.StripPrefix("/assets/", staticFS))

	// register the download and view endpoints
	router.HandleFunc("/download", downloadHandler).Methods("GET")
	router.HandleFunc("/view", viewHandler).Methods("GET")

	// every other path is handled by the index (path) handler
	indexRouter := router.PathPrefix("/").Subrouter()
	indexRouter.PathPrefix("/").HandlerFunc(pathHandler)

	// read the listen address from the config
	bindIP := viper.GetString("server.bindip")
	listenPort := viper.GetString("server.listenport")

	// define the http server
	server := http.Server{
		Addr:    fmt.Sprintf("%s:%s", bindIP, listenPort),
		Handler: router,
	}

	logrus.Warnf("HTTP server defined listening on %s:%s", bindIP, listenPort)
	logrus.Fatal(server.ListenAndServe())
}
// downloadHandler handles requests to /download?file=<base64>&hash=<sha256>.
//
// The file parameter is the base64 encoded file path (relative to the
// configured server root) and the hash parameter is the salted sha256 sum of
// that path. The hash is re-derived and compared so that only links generated
// by this server (see pathHandler) can trigger a download.
func downloadHandler(w http.ResponseWriter, r *http.Request) {
	query := r.URL.Query()

	// both query parameters are mandatory; indexing a missing query value
	// (query["file"][0]) would panic with index out of range
	if len(query["file"]) == 0 || len(query["hash"]) == 0 {
		logrus.Warn("missing file or hash query parameter")
		http.Error(w, "missing file or hash query parameter", http.StatusBadRequest)
		return
	}

	// decode the base64 encoded file path
	decodedFilePath, err := base64.StdEncoding.DecodeString(query["file"][0])
	if err != nil {
		logrus.Warn("Could not decode the base64 encoded filepath")
		http.Error(w, "invalid file parameter", http.StatusBadRequest)
		return
	}
	file := string(decodedFilePath)

	// hash the provided file path by first salting it and then hashing it
	// using the sha256 alg; a mismatch with the user-provided hash means the
	// link was not generated by us
	providedHash := query["hash"][0]
	salted := fmt.Sprintf("%s%s", file, viper.GetString("hash.salt"))
	hash := fmt.Sprintf("%x", sha256.Sum256([]byte(salted)))
	if hash != providedHash {
		logrus.Warn("hashes don't match")
		http.Error(w, "invalid hash", http.StatusForbidden)
		return
	}

	// mitigate path traversal by stripping the root prefix and any ".."
	root := viper.GetString("server.root")
	strippedFile := strings.Replace(file, root, "", -1)
	strippedFile = strings.Replace(strippedFile, "..", "", -1)

	// NOTE(review): this mirrors the *request's* Content-Type header into the
	// response, which is usually empty on a GET — confirm this is intended
	w.Header().Set("Content-Type", r.Header.Get("Content-Type"))
	http.ServeFile(w, r, fmt.Sprintf("%s%s", root, strippedFile))
}
// viewHandler handles requests to /view?file=<base64>&hash=<sha256>.
//
// Like downloadHandler it verifies the salted hash of the decoded file path,
// but instead of triggering a download it writes the file's content directly
// to the response so the browser renders it.
func viewHandler(w http.ResponseWriter, r *http.Request) {
	query := r.URL.Query()

	// both query parameters are mandatory; indexing a missing query value
	// (query["file"][0]) would panic with index out of range
	if len(query["file"]) == 0 || len(query["hash"]) == 0 {
		logrus.Warn("missing file or hash query parameter")
		http.Error(w, "missing file or hash query parameter", http.StatusBadRequest)
		return
	}

	// get the provided filepath and decode it
	decodedFilePath, err := base64.StdEncoding.DecodeString(query["file"][0])
	if err != nil {
		logrus.Warn("Could not decode the base64 encoded filepath")
		http.Error(w, "invalid file parameter", http.StatusBadRequest)
		return
	}
	file := string(decodedFilePath)

	// hash the provided file path by first salting it and then hashing it
	// using the sha256 alg; a mismatch means the link was not generated by us
	providedHash := query["hash"][0]
	salted := fmt.Sprintf("%s%s", file, viper.GetString("hash.salt"))
	hash := fmt.Sprintf("%x", sha256.Sum256([]byte(salted)))
	if hash != providedHash {
		logrus.Warn("hashes don't match")
		http.Error(w, "invalid hash", http.StatusForbidden)
		return
	}

	// mitigate path traversal
	strippedFile := strings.Replace(file, "..", "", -1)

	// read the file below the configured root and write it to the response;
	// the original fell through on a read error and wrote an empty 200 body
	root := viper.GetString("server.root")
	readfile := fmt.Sprintf("%s/%s", root, strippedFile)
	dat, err := ioutil.ReadFile(readfile)
	if err != nil {
		logrus.Warnf("Could not read file: %s", err)
		http.Error(w, "could not read file", http.StatusNotFound)
		return
	}
	fmt.Fprintf(w, "%s", string(dat))
}
// pathHandler renders a directory listing for the requested path.
//
// If the ?download or ?view query is present it instead redirects to the
// matching handler, passing along the base64 encoded file path and the
// salted hash so the target handler can verify the request.
func pathHandler(w http.ResponseWriter, r *http.Request) {
	content := make(map[string]interface{})

	root := viper.GetString("server.root")
	requestPath := fmt.Sprintf("%s%s", root, r.URL.Path)
	query := r.URL.Query()

	// the hash is optional at this point; indexing query["hash"][0] directly
	// would panic on requests without a hash parameter. An empty hash is
	// simply rejected by the download / view handlers.
	hashParam := ""
	if len(query["hash"]) > 0 {
		hashParam = query["hash"][0]
	}

	// if the ?download query is present, redirect to the download handler
	// with the needed information
	if query["download"] != nil {
		// strip the root prefix and base64 encode the remaining file path
		strippedFile := strings.Replace(requestPath, root, "", -1)
		strippedFileBase64 := base64.StdEncoding.EncodeToString([]byte(strippedFile))
		// TODO(review): StdEncoding output can contain '+' and '=', which
		// should be URL-escaped (or use base64.URLEncoding on both sides)
		path := fmt.Sprintf("/download?file=%s&hash=%s", strippedFileBase64, hashParam)
		http.Redirect(w, r, path, http.StatusSeeOther)
		return
	}

	// if the ?view query is present, redirect to the view handler with the
	// needed information
	if query["view"] != nil {
		strippedFile := strings.Replace(r.URL.Path, root, "", -1)
		strippedFileBase64 := base64.StdEncoding.EncodeToString([]byte(strippedFile))
		path := fmt.Sprintf("/view?file=%s&hash=%s", strippedFileBase64, hashParam)
		http.Redirect(w, r, path, http.StatusSeeOther)
		return
	}

	// define the breadcrumbs
	breadcrumbsList := breadcrumbs(r)
	content["Breadcrumbs"] = breadcrumbsList

	// get all files in the request dir
	files, err := ioutil.ReadDir(requestPath)
	if err != nil {
		logrus.Warnf("Could not read the content of the request dir: %s", err)
		return
	}

	// define the items (files and dirs)
	var items structs.Items
	var dirCount, fileCount int
	for _, f := range files {
		// skip files the config hides (dotfiles, blacklisted extensions)
		if !filterIsValid(f.Name()) {
			continue
		}

		// format the file's modtime in the way described in the config
		if viper.GetString("time.format") == "" {
			logrus.Fatalf("Please insert a format for the time in the config (time.format), see the README for more information.")
		}
		humanModTime := f.ModTime().Format(viper.GetString("time.format"))

		// define the file or dir's url
		var url string
		if r.URL.Path != "/" {
			url = fmt.Sprintf("%s/%s", r.URL.Path, f.Name())
		} else {
			url = fmt.Sprintf("/%s", f.Name())
		}

		// define the file or dir
		item := structs.Item{
			Name:         f.Name(),
			HumanSize:    f.Size(),
			URL:          url,
			HumanModTime: humanModTime,
			IsSymlink:    false,
			Size:         "0",
		}

		if f.IsDir() {
			item.IsDir = true
			dirCount++
		} else {
			// if the file extension is in the list of viewable extensions,
			// define the file as viewable, else flag it as downloadable
			if filterIsViewable(f.Name()) {
				item.Viewable = true
			} else {
				item.Download = true
			}
			fileCount++
		}

		// Hash the salted file URL so the download / view handlers can verify
		// that the link was generated by us (the receiver re-derives the same
		// hash and rejects anything that does not match). Overall: this makes
		// sure that only we can provide files that can be downloaded.
		salted := fmt.Sprintf("%s%s", url, viper.GetString("hash.salt"))
		item.SaltedHash = fmt.Sprintf("%x", sha256.Sum256([]byte(salted)))

		items = append(items, item)
	}

	// add the items and the file / dir counts to the content map
	content["Items"] = items
	content["NumDirs"] = dirCount
	content["NumFiles"] = fileCount

	// if there is more than one breadcrumb, define the uppath as the second
	// last breadcrumb
	// I did this, because somehow things broke when simply using ".." in
	// combination with hidden folders
	if len(breadcrumbsList) > 1 {
		content["UpPath"] = breadcrumbsList[len(breadcrumbsList)-2].Link
	} else {
		content["UpPath"] = ".."
	}

	// the original caddy file_server has the option to limit the files that
	// are displayed
	// TODO: Implement this sometime
	content["ItemsLimitedTo"] = 100000000000

	// define the sort order manually
	// TODO: handle this correctly
	content["Sort"] = "namedirfirst"
	content["Order"] = "desc"

	// Set the site's title to the title defined in the config
	content["SiteTitle"] = viper.GetString("server.name")

	// if we're not at the root, we can still go further down
	if r.RequestURI != "/" {
		content["CanGoUp"] = "true"
	}

	// render the listing template (the original used logrus.Warn with a %s
	// verb, which logged the literal "%s" and dropped the error)
	t := template.New("")
	t, err = t.ParseGlob("./hosted/tmpl/*.html")
	if err != nil {
		logrus.Warnf("Could not parse the template: %s", err)
		return
	}
	if err = t.ExecuteTemplate(w, "index", content); err != nil {
		logrus.Warnf("Error executing the template: %s", err)
	}
}
// breadcrumbs builds the breadcrumb trail for the request's URL path.
// Each path segment becomes a crumb whose link is the path up to and
// including that segment.
func breadcrumbs(r *http.Request) structs.Breadcrumbs {
	// mitigate path traversals
	cleaned := strings.Replace(r.URL.Path, "..", "", -1)

	// drop a trailing slash, except for the root path itself
	if r.URL.Path != "/" {
		cleaned = strings.TrimRight(cleaned, "/")
	}

	// skip the leading slash so the first segment isn't an empty field
	segments := strings.Split(cleaned[1:], "/")

	// build one crumb per segment; the link is the joined path up to the
	// current segment, prefixed with a slash
	var trail structs.Breadcrumbs
	for i := range segments {
		trail = append(trail, structs.Crumb{
			Text: segments[i],
			Link: fmt.Sprintf("/%s", strings.Join(segments[:i+1], "/")),
		})
	}
	return trail
}
// filterIsValid reports whether the file with the given name should be
// displayed. The decision is made using the "hide" section of the config:
// dotfiles are hidden when hide.files is set, and any name ending in one of
// the hide.extensions suffixes is hidden.
func filterIsValid(name string) bool {
	// hide files starting with a dot if the "hide.files" directive is set;
	// strings.HasPrefix is also safe for an empty name (the original
	// name[0] indexing panicked on "")
	if viper.GetBool("hide.files") && strings.HasPrefix(name, ".") {
		return false
	}

	// hide files whose extension is blacklisted in the config
	for _, extension := range viper.GetStringSlice("hide.extensions") {
		if strings.HasSuffix(name, extension) {
			return false
		}
	}
	return true
}
// filterIsViewable determines if the file with the given name is "viewable",
// meaning the link to the file gets a ?view suffix so the browser renders
// the file instead of downloading it. A file is viewable when its name ends
// in one of the view.extensions suffixes from the config.
func filterIsViewable(name string) bool {
	viewable := viper.GetStringSlice("view.extensions")
	for i := range viewable {
		if strings.HasSuffix(name, viewable[i]) {
			return true
		}
	}
	return false
}