Compare commits
No commits in common. "master" and "v1.2.4" have entirely different histories.
72 changed files with 2198 additions and 3138 deletions
60
.github/workflows/buildx.yaml
vendored
60
.github/workflows/buildx.yaml
vendored
|
@ -1,60 +0,0 @@
|
|||
name: buildx
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
buildx:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
-
|
||||
name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
-
|
||||
name: Prepare
|
||||
id: prepare
|
||||
run: |
|
||||
DOCKER_IMAGE=andreimarcu/linx-server
|
||||
DOCKER_PLATFORMS=linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64/v8,linux/386
|
||||
VERSION=version-${GITHUB_REF#refs/tags/v}
|
||||
TAGS="--tag ${DOCKER_IMAGE}:${VERSION} --tag ${DOCKER_IMAGE}:latest"
|
||||
|
||||
echo ::set-output name=docker_image::${DOCKER_IMAGE}
|
||||
echo ::set-output name=version::${VERSION}
|
||||
echo ::set-output name=buildx_args::--platform ${DOCKER_PLATFORMS} \
|
||||
--build-arg VERSION=${VERSION} \
|
||||
--build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
|
||||
--build-arg VCS_REF=${GITHUB_SHA::8} \
|
||||
${TAGS} --file Dockerfile .
|
||||
-
|
||||
name: Set up Docker Buildx
|
||||
uses: crazy-max/ghaction-docker-buildx@v3
|
||||
-
|
||||
name: Docker Buildx (build)
|
||||
run: |
|
||||
docker buildx build --output "type=image,push=false" ${{ steps.prepare.outputs.buildx_args }}
|
||||
-
|
||||
name: Docker Login
|
||||
if: success()
|
||||
env:
|
||||
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
|
||||
run: |
|
||||
echo "${DOCKER_PASSWORD}" | docker login --username "${DOCKER_USERNAME}" --password-stdin
|
||||
-
|
||||
name: Docker Buildx (push)
|
||||
if: success()
|
||||
run: |
|
||||
docker buildx build --output "type=image,push=true" ${{ steps.prepare.outputs.buildx_args }}
|
||||
-
|
||||
name: Docker Check Manifest
|
||||
if: always()
|
||||
run: |
|
||||
docker run --rm mplatform/mquery ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.version }}
|
||||
-
|
||||
name: Clear
|
||||
if: always()
|
||||
run: |
|
||||
rm -f ${HOME}/.docker/config.json
|
8
.gitignore
vendored
8
.gitignore
vendored
|
@ -20,8 +20,6 @@ _cgo_defun.c
|
|||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
.DS_Store
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
|
@ -29,11 +27,5 @@ _testmain.go
|
|||
*.prof
|
||||
|
||||
linx-server
|
||||
linx-cleanup/linx-cleanup
|
||||
linx-genkey/linx-genkey
|
||||
linx-server.conf
|
||||
files/
|
||||
meta/
|
||||
binaries/
|
||||
custom_pages/
|
||||
authfile
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
language: go
|
||||
|
||||
go:
|
||||
- "1.14"
|
||||
- 1.5
|
||||
- 1.6
|
||||
|
||||
before_script:
|
||||
- go vet ./...
|
||||
|
|
23
Dockerfile
23
Dockerfile
|
@ -1,28 +1,13 @@
|
|||
FROM golang:1.14-alpine3.11 AS build
|
||||
|
||||
COPY . /go/src/github.com/andreimarcu/linx-server
|
||||
WORKDIR /go/src/github.com/andreimarcu/linx-server
|
||||
FROM golang:alpine
|
||||
|
||||
RUN set -ex \
|
||||
&& apk add --no-cache --virtual .build-deps git \
|
||||
&& go get -v . \
|
||||
&& apk add --no-cache --virtual .build-deps git mercurial \
|
||||
&& go get github.com/andreimarcu/linx-server \
|
||||
&& apk del .build-deps
|
||||
|
||||
FROM alpine:3.11
|
||||
|
||||
COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server
|
||||
|
||||
ENV GOPATH /go
|
||||
ENV SSL_CERT_FILE /etc/ssl/cert.pem
|
||||
|
||||
COPY static /go/src/github.com/andreimarcu/linx-server/static/
|
||||
COPY templates /go/src/github.com/andreimarcu/linx-server/templates/
|
||||
|
||||
RUN mkdir -p /data/files && mkdir -p /data/meta && chown -R 65534:65534 /data
|
||||
|
||||
VOLUME ["/data/files", "/data/meta"]
|
||||
|
||||
EXPOSE 8080
|
||||
USER nobody
|
||||
ENTRYPOINT ["/usr/local/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
|
||||
ENTRYPOINT ["/go/bin/linx-server", "-bind=0.0.0.0:8080", "-filespath=/data/files/", "-metapath=/data/meta/"]
|
||||
CMD ["-sitename=linx", "-allowhotlink"]
|
||||
|
|
183
README.md
183
README.md
|
@ -1,163 +1,75 @@
|
|||
# Development on this repository has been frozen.
|
||||
|
||||
Feel free to send a pull request if you are maintaining an active fork of this project to add a link to your repository in this readme.
|
||||
|
||||
|
||||
### Active Forks
|
||||
- ZizzyDizzyMC: [https://github.com/ZizzyDizzyMC/linx-server/](https://github.com/ZizzyDizzyMC/linx-server/)
|
||||
- Seb3thehacker: [https://github.com/Seb3thehacker/linx-server](https://github.com/Seb3thehacker/linx-server)
|
||||
|
||||
|
||||
---
|
||||
|
||||
|
||||
linx-server
|
||||
linx-server
|
||||
======
|
||||
[](https://travis-ci.org/andreimarcu/linx-server)
|
||||
|
||||
Self-hosted file/media sharing website.
|
||||
|
||||
### Clients
|
||||
**Official**
|
||||
- CLI: **linx-client** - [Source](https://github.com/andreimarcu/linx-client)
|
||||
|
||||
**Unofficial**
|
||||
- Android: **LinxShare** - [Source](https://github.com/iksteen/LinxShare/) | [Google Play](https://play.google.com/store/apps/details?id=org.thegraveyard.linxshare)
|
||||
- CLI: **golinx** - [Source](https://github.com/mutantmonkey/golinx)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
- Display common filetypes (image, video, audio, markdown, pdf)
|
||||
- Display syntax-highlighted code with in-place editing
|
||||
- Documented API with keys for restricting uploads
|
||||
- Documented API with keys if need to restrict uploads (can use [linx-client](https://github.com/andreimarcu/linx-client) for uploading through command-line)
|
||||
- Torrent download of files using web seeding
|
||||
- File expiry, deletion key, file access key, and random filename options
|
||||
- File expiry, deletion key, and random filename options
|
||||
|
||||
|
||||
### Screenshots
|
||||
<img width="730" src="https://user-images.githubusercontent.com/4650950/76579039-03c82680-6488-11ea-8e23-4c927386fbd9.png" />
|
||||
|
||||
<img width="180" src="https://user-images.githubusercontent.com/4650950/76578903-771d6880-6487-11ea-8baf-a4a23fef4d26.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578910-7be21c80-6487-11ea-9a0a-587d59bc5f80.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578908-7b498600-6487-11ea-8994-ee7b6eb9cdb1.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578907-7b498600-6487-11ea-8941-8f582bf87fb0.png" />
|
||||
<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530123/4211e946-7372-11e5-9cb5-9956c5c49d95.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530124/4217db8a-7372-11e5-957d-b3abb873dc80.png" />
|
||||
<img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530844/48d6d4e2-7379-11e5-8886-d4c32c416cbc.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530845/48dc9ae4-7379-11e5-9e59-959f7c40a573.png" /> <img width="230" src="https://cloud.githubusercontent.com/assets/4650950/10530846/48df08ec-7379-11e5-89f6-5c3f6372384d.png" />
|
||||
|
||||
|
||||
Getting started
|
||||
Get release and run
|
||||
-------------------
|
||||
|
||||
#### Using Docker
|
||||
1. Create directories ```files``` and ```meta``` and run ```chown -R 65534:65534 meta && chown -R 65534:65534 files```
|
||||
2. Create a config file (example provided in repo), we'll refer to it as __linx-server.conf__ in the following examples
|
||||
|
||||
|
||||
|
||||
Example running
|
||||
```
|
||||
docker run -p 8080:8080 -v /path/to/linx-server.conf:/data/linx-server.conf -v /path/to/meta:/data/meta -v /path/to/files:/data/files andreimarcu/linx-server -config /data/linx-server.conf
|
||||
```
|
||||
|
||||
Example with docker-compose
|
||||
```
|
||||
version: '2.2'
|
||||
services:
|
||||
linx-server:
|
||||
container_name: linx-server
|
||||
image: andreimarcu/linx-server
|
||||
command: -config /data/linx-server.conf
|
||||
volumes:
|
||||
- /path/to/files:/data/files
|
||||
- /path/to/meta:/data/meta
|
||||
- /path/to/linx-server.conf:/data/linx-server.conf
|
||||
network_mode: bridge
|
||||
ports:
|
||||
- "8080:8080"
|
||||
restart: unless-stopped
|
||||
```
|
||||
Ideally, you would use a reverse proxy such as nginx or caddy to handle TLS certificates.
|
||||
|
||||
#### Using a binary release
|
||||
|
||||
1. Grab the latest binary from the [releases](https://github.com/andreimarcu/linx-server/releases), then run ```go install```
|
||||
2. Run ```linx-server -config path/to/linx-server.conf```
|
||||
1. Grab the latest binary from the [releases](https://github.com/andreimarcu/linx-server/releases)
|
||||
2. Run ```./linx-server```
|
||||
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
#### Configuration
|
||||
All configuration options are accepted either as arguments or can be placed in a file as such (see example file linx-server.conf.example in repo):
|
||||
All configuration options are accepted either as arguments or can be placed in an ini-style file as such:
|
||||
```ini
|
||||
bind = 127.0.0.1:8080
|
||||
sitename = myLinx
|
||||
maxsize = 4294967296
|
||||
maxexpiry = 86400
|
||||
# ... etc
|
||||
```
|
||||
...and then run ```linx-server -config path/to/linx-server.conf```
|
||||
allowhotlink = true
|
||||
# etc
|
||||
```
|
||||
...and then invoke ```linx-server -config path/to/config.ini```
|
||||
|
||||
#### Options
|
||||
- ```-bind 127.0.0.1:8080``` -- what to bind to (default is 127.0.0.1:8080)
|
||||
- ```-sitename myLinx``` -- the site name displayed on top (default is inferred from Host header)
|
||||
- ```-siteurl "http://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
|
||||
- ```-filespath files/``` -- Path to store uploads (default is files/)
|
||||
- ```-metapath meta/``` -- Path to store information about uploads (default is meta/)
|
||||
- ```-maxsize 4294967296``` -- maximum upload file size in bytes (default 4GB)
|
||||
- ```-maxexpiry 86400``` -- maximum expiration time in seconds (default is 0, which is no expiry)
|
||||
- ```-allowhotlink``` -- Allow file hotlinking
|
||||
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
|
||||
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;")
|
||||
- ```-xframeoptions "..." ``` -- X-Frame-Options header (default is "SAMEORIGIN")
|
||||
- ```-remoteuploads``` -- (optionally) enable remote uploads (/upload?url=https://...)
|
||||
- ```-nologs``` -- (optionally) disable request logs in stdout
|
||||
- ```-googleapikey``` -- (optionally) API Key for Google's URL Shortener. ([How to create one](https://developers.google.com/url-shortener/v1/getting_started#APIKey))
|
||||
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```bind = 127.0.0.1:8080``` | what to bind to (default is 127.0.0.1:8080)
|
||||
| ```sitename = myLinx``` | the site name displayed on top (default is inferred from Host header)
|
||||
| ```siteurl = https://mylinx.example.org/``` | the site url (default is inferred from execution context)
|
||||
| ```selifpath = selif``` | path relative to site base url (the "selif" in mylinx.example.org/selif/image.jpg) where files are accessed directly (default: selif)
|
||||
| ```maxsize = 4294967296``` | maximum upload file size in bytes (default 4GB)
|
||||
| ```maxexpiry = 86400``` | maximum expiration time in seconds (default is 0, which is no expiry)
|
||||
| ```allowhotlink = true``` | Allow file hotlinking
|
||||
| ```contentsecuritypolicy = "..."``` | Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
|
||||
| ```filecontentsecuritypolicy = "..."``` | Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
|
||||
| ```refererpolicy = "..."``` | Referrer-Policy header for pages (default is "same-origin")
|
||||
| ```filereferrerpolicy = "..."``` | Referrer-Policy header for files (default is "same-origin")
|
||||
| ```xframeoptions = "..." ``` | X-Frame-Options header (default is "SAMEORIGIN")
|
||||
| ```remoteuploads = true``` | (optionally) enable remote uploads (/upload?url=https://...)
|
||||
| ```nologs = true``` | (optionally) disable request logs in stdout
|
||||
| ```force-random-filename = true``` | (optionally) force the use of random filenames
|
||||
| ```custompagespath = custom_pages/``` | (optionally) specify path to directory containing markdown pages (must end in .md) that will be added to the site navigation (this can be useful for providing contact/support information and so on). For example, custom_pages/My_Page.md will become My Page in the site navigation
|
||||
#### SSL with built-in server
|
||||
- ```-certfile path/to/your.crt``` -- Path to the ssl certificate (required if you want to use the https server)
|
||||
- ```-keyfile path/to/your.key``` -- Path to the ssl key (required if you want to use the https server)
|
||||
|
||||
#### Use with http proxy
|
||||
- ```-realip``` -- let linx-server know you (nginx, etc) are providing the X-Real-IP and/or X-Forwarded-For headers.
|
||||
|
||||
#### Cleaning up expired files
|
||||
When files expire, access is disabled immediately, but the files and metadata
|
||||
will persist on disk until someone attempts to access them. You can set the following option to run cleanup every few minutes. This can also be done using a separate utility found the linx-cleanup directory.
|
||||
|
||||
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```cleanup-every-minutes = 5``` | How often to clean up expired files in minutes (default is 0, which means files will be cleaned up as they are accessed)
|
||||
|
||||
#### Use with fastcgi
|
||||
- ```-fastcgi``` -- serve through fastcgi
|
||||
|
||||
#### Require API Keys for uploads
|
||||
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```authfile = path/to/authfile``` | (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
|
||||
| ```remoteauthfile = path/to/remoteauthfile``` | (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys
|
||||
| ```basicauth = true``` | (optionally) allow basic authorization to upload or paste files from browser when `-authfile` is enabled. When uploading, you will be prompted to enter a user and password - leave the user blank and use your auth key as the password
|
||||
- ```-authfile path/to/authfile``` -- (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
|
||||
- ```-remoteauthfile path/to/remoteauthfile``` -- (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys
|
||||
|
||||
A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.
|
||||
|
||||
#### Storage backends
|
||||
The following storage backends are available:
|
||||
|
||||
|Name|Notes|Options
|
||||
|----|-----|-------
|
||||
|LocalFS|Enabled by default, this backend uses the filesystem|```filespath = files/``` -- Path to store uploads (default is files/)<br />```metapath = meta/``` -- Path to store information about uploads (default is meta/)|
|
||||
|S3|Use with any S3-compatible provider.<br> This implementation will stream files through the linx instance (every download will request and stream the file from the S3 bucket). File metadata will be stored as tags on the object in the bucket.<br><br>For high-traffic environments, one might consider using an external caching layer such as described [in this article](https://blog.sentry.io/2017/03/01/dodging-s3-downtime-with-nginx-and-haproxy.html).|```s3-endpoint = https://...``` -- S3 endpoint<br>```s3-region = us-east-1``` -- S3 region<br>```s3-bucket = mybucket``` -- S3 bucket to use for files and metadata<br>```s3-force-path-style = true``` (optional) -- force path-style addresing (e.g. https://<span></span>s3.amazonaws.com/linx/example.txt)<br><br>Environment variables to provide:<br>```AWS_ACCESS_KEY_ID``` -- the S3 access key<br>```AWS_SECRET_ACCESS_KEY ``` -- the S3 secret key<br>```AWS_SESSION_TOKEN``` (optional) -- the S3 session token|
|
||||
|
||||
|
||||
#### SSL with built-in server
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```certfile = path/to/your.crt``` | Path to the ssl certificate (required if you want to use the https server)
|
||||
| ```keyfile = path/to/your.key``` | Path to the ssl key (required if you want to use the https server)
|
||||
|
||||
#### Use with http proxy
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```realip = true``` | let linx-server know you (nginx, etc) are providing the X-Real-IP and/or X-Forwarded-For headers.
|
||||
|
||||
#### Use with fastcgi
|
||||
|Option|Description
|
||||
|------|-----------
|
||||
| ```fastcgi = true``` | serve through fastcgi
|
||||
|
||||
Deployment
|
||||
----------
|
||||
|
@ -182,16 +94,29 @@ server {
|
|||
}
|
||||
}
|
||||
```
|
||||
And run linx-server with the ```fastcgi = true``` option.
|
||||
And run linx-server with the ```-fastcgi``` option.
|
||||
|
||||
#### 2. Using the built-in https server
|
||||
Run linx-server with the ```certfile = path/to/cert.file``` and ```keyfile = path/to/key.file``` options.
|
||||
Run linx-server with the ```-certfile path/to/cert.file``` and ```-keyfile path/to/key.file``` options.
|
||||
|
||||
#### 3. Using the built-in http server
|
||||
Run linx-server normally.
|
||||
|
||||
#### 4. Using Docker with the built-in http server
|
||||
First, build the image:
|
||||
```docker build -t linx-server .```
|
||||
|
||||
You'll need some directories for the persistent storage. For the purposes of this example, we will use `/media/meta` and `/media/files`.
|
||||
|
||||
Then, run it:
|
||||
```docker run -p 8080:8080 -v /media/meta:/data/meta -v /media/files:/data/files linx-server```
|
||||
|
||||
|
||||
Development
|
||||
-----------
|
||||
Any help is welcome, PRs will be reviewed and merged accordingly.
|
||||
The official IRC channel is #linx on irc.oftc.net
|
||||
|
||||
1. ```go get -u github.com/andreimarcu/linx-server ```
|
||||
2. ```cd $GOPATH/src/github.com/andreimarcu/linx-server ```
|
||||
3. ```go build && ./linx-server```
|
||||
|
@ -216,4 +141,4 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
|||
|
||||
Author
|
||||
-------
|
||||
Andrei Marcu, https://andreim.net/
|
||||
Andrei Marcu, http://andreim.net/
|
||||
|
|
147
access.go
147
access.go
|
@ -1,147 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/flosch/pongo2"
|
||||
"github.com/zenazn/goji/web"
|
||||
)
|
||||
|
||||
type accessKeySource int
|
||||
|
||||
const (
|
||||
accessKeySourceNone accessKeySource = iota
|
||||
accessKeySourceCookie
|
||||
accessKeySourceHeader
|
||||
accessKeySourceForm
|
||||
accessKeySourceQuery
|
||||
)
|
||||
|
||||
const accessKeyHeaderName = "Linx-Access-Key"
|
||||
const accessKeyParamName = "access_key"
|
||||
|
||||
var (
|
||||
errInvalidAccessKey = errors.New("invalid access key")
|
||||
|
||||
cliUserAgentRe = regexp.MustCompile("(?i)(lib)?curl|wget")
|
||||
)
|
||||
|
||||
func checkAccessKey(r *http.Request, metadata *backends.Metadata) (accessKeySource, error) {
|
||||
key := metadata.AccessKey
|
||||
if key == "" {
|
||||
return accessKeySourceNone, nil
|
||||
}
|
||||
|
||||
cookieKey, err := r.Cookie(accessKeyHeaderName)
|
||||
if err == nil {
|
||||
if cookieKey.Value == key {
|
||||
return accessKeySourceCookie, nil
|
||||
}
|
||||
return accessKeySourceCookie, errInvalidAccessKey
|
||||
}
|
||||
|
||||
headerKey := r.Header.Get(accessKeyHeaderName)
|
||||
if headerKey == key {
|
||||
return accessKeySourceHeader, nil
|
||||
} else if headerKey != "" {
|
||||
return accessKeySourceHeader, errInvalidAccessKey
|
||||
}
|
||||
|
||||
formKey := r.PostFormValue(accessKeyParamName)
|
||||
if formKey == key {
|
||||
return accessKeySourceForm, nil
|
||||
} else if formKey != "" {
|
||||
return accessKeySourceForm, errInvalidAccessKey
|
||||
}
|
||||
|
||||
queryKey := r.URL.Query().Get(accessKeyParamName)
|
||||
if queryKey == key {
|
||||
return accessKeySourceQuery, nil
|
||||
} else if formKey != "" {
|
||||
return accessKeySourceQuery, errInvalidAccessKey
|
||||
}
|
||||
|
||||
return accessKeySourceNone, errInvalidAccessKey
|
||||
}
|
||||
|
||||
func setAccessKeyCookies(w http.ResponseWriter, siteURL, fileName, value string, expires time.Time) {
|
||||
u, err := url.Parse(siteURL)
|
||||
if err != nil {
|
||||
log.Printf("cant parse siteURL (%v): %v", siteURL, err)
|
||||
return
|
||||
}
|
||||
|
||||
cookie := http.Cookie{
|
||||
Name: accessKeyHeaderName,
|
||||
Value: value,
|
||||
HttpOnly: true,
|
||||
Domain: u.Hostname(),
|
||||
Expires: expires,
|
||||
}
|
||||
|
||||
cookie.Path = path.Join(u.Path, fileName)
|
||||
http.SetCookie(w, &cookie)
|
||||
|
||||
cookie.Path = path.Join(u.Path, Config.selifPath, fileName)
|
||||
http.SetCookie(w, &cookie)
|
||||
}
|
||||
|
||||
func fileAccessHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
if !Config.noDirectAgents && cliUserAgentRe.MatchString(r.Header.Get("User-Agent")) && !strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
fileServeHandler(c, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
fileName := c.URLParams["name"]
|
||||
|
||||
metadata, err := checkFile(fileName)
|
||||
if err == backends.NotFoundErr {
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
} else if err != nil {
|
||||
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
|
||||
return
|
||||
}
|
||||
|
||||
if src, err := checkAccessKey(r, &metadata); err != nil {
|
||||
// remove invalid cookie
|
||||
if src == accessKeySourceCookie {
|
||||
setAccessKeyCookies(w, getSiteURL(r), fileName, "", time.Unix(0, 0))
|
||||
}
|
||||
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
dec := json.NewEncoder(w)
|
||||
_ = dec.Encode(map[string]string{
|
||||
"error": errInvalidAccessKey.Error(),
|
||||
})
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
_ = renderTemplate(Templates["access.html"], pongo2.Context{
|
||||
"filename": fileName,
|
||||
"accesspath": fileName,
|
||||
}, r, w)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if metadata.AccessKey != "" {
|
||||
var expiry time.Time
|
||||
if Config.accessKeyCookieExpiry != 0 {
|
||||
expiry = time.Now().Add(time.Duration(Config.accessKeyCookieExpiry) * time.Second)
|
||||
}
|
||||
setAccessKeyCookies(w, getSiteURL(r), fileName, metadata.AccessKey, expiry)
|
||||
}
|
||||
|
||||
fileDisplayHandler(c, w, r, fileName, metadata)
|
||||
}
|
116
auth.go
Normal file
116
auth.go
Normal file
|
@ -0,0 +1,116 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/base64"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"golang.org/x/crypto/scrypt"
|
||||
)
|
||||
|
||||
const (
|
||||
scryptSalt = "linx-server"
|
||||
scryptN = 16384
|
||||
scryptr = 8
|
||||
scryptp = 1
|
||||
scryptKeyLen = 32
|
||||
)
|
||||
|
||||
type AuthOptions struct {
|
||||
AuthFile string
|
||||
UnauthMethods []string
|
||||
}
|
||||
|
||||
type auth struct {
|
||||
successHandler http.Handler
|
||||
failureHandler http.Handler
|
||||
authKeys []string
|
||||
o AuthOptions
|
||||
}
|
||||
|
||||
func readAuthKeys(authFile string) []string {
|
||||
var authKeys []string
|
||||
|
||||
f, err := os.Open(authFile)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to open authfile: ", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
authKeys = append(authKeys, scanner.Text())
|
||||
}
|
||||
|
||||
err = scanner.Err()
|
||||
if err != nil {
|
||||
log.Fatal("Scanner error while reading authfile: ", err)
|
||||
}
|
||||
|
||||
return authKeys
|
||||
}
|
||||
|
||||
func checkAuth(authKeys []string, key string) (result bool, err error) {
|
||||
checkKey, err := scrypt.Key([]byte(key), []byte(scryptSalt), scryptN, scryptr, scryptp, scryptKeyLen)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
encodedKey := base64.StdEncoding.EncodeToString(checkKey)
|
||||
for _, v := range authKeys {
|
||||
if encodedKey == v {
|
||||
result = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
result = false
|
||||
return
|
||||
}
|
||||
|
||||
func (a auth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
if sliceContains(a.o.UnauthMethods, r.Method) {
|
||||
// allow unauthenticated methods
|
||||
a.successHandler.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
key := r.Header.Get("Linx-Api-Key")
|
||||
|
||||
result, err := checkAuth(a.authKeys, key)
|
||||
if err != nil || !result {
|
||||
a.failureHandler.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
a.successHandler.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
func UploadAuth(o AuthOptions) func(http.Handler) http.Handler {
|
||||
fn := func(h http.Handler) http.Handler {
|
||||
return auth{
|
||||
successHandler: h,
|
||||
failureHandler: http.HandlerFunc(badAuthorizationHandler),
|
||||
authKeys: readAuthKeys(o.AuthFile),
|
||||
o: o,
|
||||
}
|
||||
}
|
||||
return fn
|
||||
}
|
||||
|
||||
func badAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
}
|
||||
|
||||
func sliceContains(slice []string, s string) bool {
|
||||
for _, v := range slice {
|
||||
if s == v {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
|
@ -1,154 +0,0 @@
|
|||
package apikeys
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"log"
|
||||
"net/http"
|
||||
"os"
|
||||
|
||||
"golang.org/x/crypto/scrypt"
|
||||
|
||||
"github.com/zenazn/goji/web"
|
||||
)
|
||||
|
||||
const (
|
||||
scryptSalt = "linx-server"
|
||||
scryptN = 16384
|
||||
scryptr = 8
|
||||
scryptp = 1
|
||||
scryptKeyLen = 32
|
||||
)
|
||||
|
||||
type AuthOptions struct {
|
||||
AuthFile string
|
||||
UnauthMethods []string
|
||||
BasicAuth bool
|
||||
SiteName string
|
||||
SitePath string
|
||||
}
|
||||
|
||||
type ApiKeysMiddleware struct {
|
||||
successHandler http.Handler
|
||||
authKeys []string
|
||||
o AuthOptions
|
||||
}
|
||||
|
||||
func ReadAuthKeys(authFile string) []string {
|
||||
var authKeys []string
|
||||
|
||||
f, err := os.Open(authFile)
|
||||
if err != nil {
|
||||
log.Fatal("Failed to open authfile: ", err)
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
scanner := bufio.NewScanner(f)
|
||||
for scanner.Scan() {
|
||||
authKeys = append(authKeys, scanner.Text())
|
||||
}
|
||||
|
||||
err = scanner.Err()
|
||||
if err != nil {
|
||||
log.Fatal("Scanner error while reading authfile: ", err)
|
||||
}
|
||||
|
||||
return authKeys
|
||||
}
|
||||
|
||||
func CheckAuth(authKeys []string, key string) (result bool, err error) {
|
||||
checkKey, err := scrypt.Key([]byte(key), []byte(scryptSalt), scryptN, scryptr, scryptp, scryptKeyLen)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
encodedKey := base64.StdEncoding.EncodeToString(checkKey)
|
||||
for _, v := range authKeys {
|
||||
if encodedKey == v {
|
||||
result = true
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
result = false
|
||||
return
|
||||
}
|
||||
|
||||
func (a ApiKeysMiddleware) getSitePrefix() string {
|
||||
prefix := a.o.SitePath
|
||||
if len(prefix) <= 0 || prefix[0] != '/' {
|
||||
prefix = "/" + prefix
|
||||
}
|
||||
return prefix
|
||||
}
|
||||
|
||||
func (a ApiKeysMiddleware) goodAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Location", a.getSitePrefix())
|
||||
w.WriteHeader(http.StatusFound)
|
||||
}
|
||||
|
||||
func (a ApiKeysMiddleware) badAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
|
||||
if a.o.BasicAuth {
|
||||
rs := ""
|
||||
if a.o.SiteName != "" {
|
||||
rs = fmt.Sprintf(` realm="%s"`, a.o.SiteName)
|
||||
}
|
||||
w.Header().Set("WWW-Authenticate", `Basic`+rs)
|
||||
}
|
||||
http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
|
||||
}
|
||||
|
||||
func (a ApiKeysMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
var successHandler http.Handler
|
||||
prefix := a.getSitePrefix()
|
||||
|
||||
if r.URL.Path == prefix+"auth" {
|
||||
successHandler = http.HandlerFunc(a.goodAuthorizationHandler)
|
||||
} else {
|
||||
successHandler = a.successHandler
|
||||
}
|
||||
|
||||
if sliceContains(a.o.UnauthMethods, r.Method) && r.URL.Path != prefix+"auth" {
|
||||
// allow unauthenticated methods
|
||||
successHandler.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
key := r.Header.Get("Linx-Api-Key")
|
||||
if key == "" && a.o.BasicAuth {
|
||||
_, password, ok := r.BasicAuth()
|
||||
if ok {
|
||||
key = password
|
||||
}
|
||||
}
|
||||
|
||||
result, err := CheckAuth(a.authKeys, key)
|
||||
if err != nil || !result {
|
||||
http.HandlerFunc(a.badAuthorizationHandler).ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
successHandler.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
func NewApiKeysMiddleware(o AuthOptions) func(*web.C, http.Handler) http.Handler {
|
||||
fn := func(c *web.C, h http.Handler) http.Handler {
|
||||
return ApiKeysMiddleware{
|
||||
successHandler: h,
|
||||
authKeys: ReadAuthKeys(o.AuthFile),
|
||||
o: o,
|
||||
}
|
||||
}
|
||||
return fn
|
||||
}
|
||||
|
||||
func sliceContains(slice []string, s string) bool {
|
||||
for _, v := range slice {
|
||||
if s == v {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
|
@ -1,4 +1,4 @@
|
|||
package apikeys
|
||||
package main
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
@ -10,15 +10,15 @@ func TestCheckAuth(t *testing.T) {
|
|||
"vFpNprT9wbHgwAubpvRxYCCpA2FQMAK6hFqPvAGrdZo=",
|
||||
}
|
||||
|
||||
if r, err := CheckAuth(authKeys, ""); err != nil && r {
|
||||
if r, err := checkAuth(authKeys, ""); err != nil && r {
|
||||
t.Fatal("Authorization passed for empty key")
|
||||
}
|
||||
|
||||
if r, err := CheckAuth(authKeys, "thisisnotvalid"); err != nil && r {
|
||||
if r, err := checkAuth(authKeys, "thisisnotvalid"); err != nil && r {
|
||||
t.Fatal("Authorization passed for invalid key")
|
||||
}
|
||||
|
||||
if r, err := CheckAuth(authKeys, "haPVipRnGJ0QovA9nyqK"); err != nil && !r {
|
||||
if r, err := checkAuth(authKeys, "haPVipRnGJ0QovA9nyqK"); err != nil && !r {
|
||||
t.Fatal("Authorization failed for valid key")
|
||||
}
|
||||
}
|
23
backends/backends.go
Normal file
23
backends/backends.go
Normal file
|
@ -0,0 +1,23 @@
|
|||
package backends
|
||||
|
||||
import (
|
||||
"io"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type ReadSeekCloser interface {
|
||||
io.Reader
|
||||
io.Closer
|
||||
io.Seeker
|
||||
io.ReaderAt
|
||||
}
|
||||
|
||||
type StorageBackend interface {
|
||||
Delete(key string) error
|
||||
Exists(key string) (bool, error)
|
||||
Get(key string) ([]byte, error)
|
||||
Put(key string, r io.Reader) (int64, error)
|
||||
Open(key string) (ReadSeekCloser, error)
|
||||
ServeFile(key string, w http.ResponseWriter, r *http.Request)
|
||||
Size(key string) (int64, error)
|
||||
}
|
|
@ -1,180 +1,63 @@
|
|||
package localfs
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/helpers"
|
||||
)
|
||||
|
||||
type LocalfsBackend struct {
|
||||
metaPath string
|
||||
filesPath string
|
||||
basePath string
|
||||
}
|
||||
|
||||
type MetadataJSON struct {
|
||||
DeleteKey string `json:"delete_key"`
|
||||
AccessKey string `json:"access_key,omitempty"`
|
||||
Sha256sum string `json:"sha256sum"`
|
||||
Mimetype string `json:"mimetype"`
|
||||
Size int64 `json:"size"`
|
||||
Expiry int64 `json:"expiry"`
|
||||
ArchiveFiles []string `json:"archive_files,omitempty"`
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Delete(key string) (err error) {
|
||||
err = os.Remove(path.Join(b.filesPath, key))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
err = os.Remove(path.Join(b.metaPath, key))
|
||||
return
|
||||
func (b LocalfsBackend) Delete(key string) error {
|
||||
return os.Remove(path.Join(b.basePath, key))
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Exists(key string) (bool, error) {
|
||||
_, err := os.Stat(path.Join(b.filesPath, key))
|
||||
_, err := os.Stat(path.Join(b.basePath, key))
|
||||
return err == nil, err
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Head(key string) (metadata backends.Metadata, err error) {
|
||||
f, err := os.Open(path.Join(b.metaPath, key))
|
||||
if os.IsNotExist(err) {
|
||||
return metadata, backends.NotFoundErr
|
||||
} else if err != nil {
|
||||
return metadata, backends.BadMetadata
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
decoder := json.NewDecoder(f)
|
||||
|
||||
mjson := MetadataJSON{}
|
||||
if err := decoder.Decode(&mjson); err != nil {
|
||||
return metadata, backends.BadMetadata
|
||||
}
|
||||
|
||||
metadata.DeleteKey = mjson.DeleteKey
|
||||
metadata.AccessKey = mjson.AccessKey
|
||||
metadata.Mimetype = mjson.Mimetype
|
||||
metadata.ArchiveFiles = mjson.ArchiveFiles
|
||||
metadata.Sha256sum = mjson.Sha256sum
|
||||
metadata.Expiry = time.Unix(mjson.Expiry, 0)
|
||||
metadata.Size = mjson.Size
|
||||
|
||||
return
|
||||
func (b LocalfsBackend) Get(key string) ([]byte, error) {
|
||||
return ioutil.ReadFile(path.Join(b.basePath, key))
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Get(key string) (metadata backends.Metadata, f io.ReadCloser, err error) {
|
||||
metadata, err = b.Head(key)
|
||||
func (b LocalfsBackend) Put(key string, r io.Reader) (int64, error) {
|
||||
dst, err := os.Create(path.Join(b.basePath, key))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
f, err = os.Open(path.Join(b.filesPath, key))
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) ServeFile(key string, w http.ResponseWriter, r *http.Request) (err error) {
|
||||
_, err = b.Head(key)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
filePath := path.Join(b.filesPath, key)
|
||||
http.ServeFile(w, r, filePath)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) writeMetadata(key string, metadata backends.Metadata) error {
|
||||
metaPath := path.Join(b.metaPath, key)
|
||||
|
||||
mjson := MetadataJSON{
|
||||
DeleteKey: metadata.DeleteKey,
|
||||
AccessKey: metadata.AccessKey,
|
||||
Mimetype: metadata.Mimetype,
|
||||
ArchiveFiles: metadata.ArchiveFiles,
|
||||
Sha256sum: metadata.Sha256sum,
|
||||
Expiry: metadata.Expiry.Unix(),
|
||||
Size: metadata.Size,
|
||||
}
|
||||
|
||||
dst, err := os.Create(metaPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer dst.Close()
|
||||
|
||||
encoder := json.NewEncoder(dst)
|
||||
err = encoder.Encode(mjson)
|
||||
if err != nil {
|
||||
os.Remove(metaPath)
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (m backends.Metadata, err error) {
|
||||
filePath := path.Join(b.filesPath, key)
|
||||
|
||||
dst, err := os.Create(filePath)
|
||||
if err != nil {
|
||||
return
|
||||
return 0, err
|
||||
}
|
||||
defer dst.Close()
|
||||
|
||||
bytes, err := io.Copy(dst, r)
|
||||
if bytes == 0 {
|
||||
os.Remove(filePath)
|
||||
return m, backends.FileEmptyError
|
||||
b.Delete(key)
|
||||
return bytes, errors.New("Empty file")
|
||||
} else if err != nil {
|
||||
os.Remove(filePath)
|
||||
return m, err
|
||||
b.Delete(key)
|
||||
return bytes, err
|
||||
}
|
||||
|
||||
dst.Seek(0, 0)
|
||||
m, err = helpers.GenerateMetadata(dst)
|
||||
if err != nil {
|
||||
os.Remove(filePath)
|
||||
return
|
||||
}
|
||||
dst.Seek(0, 0)
|
||||
|
||||
m.Expiry = expiry
|
||||
m.DeleteKey = deleteKey
|
||||
m.AccessKey = accessKey
|
||||
m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, dst)
|
||||
|
||||
err = b.writeMetadata(key, m)
|
||||
if err != nil {
|
||||
os.Remove(filePath)
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
return bytes, err
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) PutMetadata(key string, m backends.Metadata) (err error) {
|
||||
err = b.writeMetadata(key, m)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
func (b LocalfsBackend) Open(key string) (backends.ReadSeekCloser, error) {
|
||||
return os.Open(path.Join(b.basePath, key))
|
||||
}
|
||||
|
||||
return
|
||||
func (b LocalfsBackend) ServeFile(key string, w http.ResponseWriter, r *http.Request) {
|
||||
filePath := path.Join(b.basePath, key)
|
||||
http.ServeFile(w, r, filePath)
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) Size(key string) (int64, error) {
|
||||
fileInfo, err := os.Stat(path.Join(b.filesPath, key))
|
||||
fileInfo, err := os.Stat(path.Join(b.basePath, key))
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
@ -182,24 +65,6 @@ func (b LocalfsBackend) Size(key string) (int64, error) {
|
|||
return fileInfo.Size(), nil
|
||||
}
|
||||
|
||||
func (b LocalfsBackend) List() ([]string, error) {
|
||||
var output []string
|
||||
|
||||
files, err := ioutil.ReadDir(b.filesPath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
output = append(output, file.Name())
|
||||
}
|
||||
|
||||
return output, nil
|
||||
}
|
||||
|
||||
func NewLocalfsBackend(metaPath string, filesPath string) LocalfsBackend {
|
||||
return LocalfsBackend{
|
||||
metaPath: metaPath,
|
||||
filesPath: filesPath,
|
||||
}
|
||||
func NewLocalfsBackend(basePath string) LocalfsBackend {
|
||||
return LocalfsBackend{basePath: basePath}
|
||||
}
|
||||
|
|
|
@ -1,18 +0,0 @@
|
|||
package backends
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Metadata struct {
|
||||
DeleteKey string
|
||||
AccessKey string
|
||||
Sha256sum string
|
||||
Mimetype string
|
||||
Size int64
|
||||
Expiry time.Time
|
||||
ArchiveFiles []string
|
||||
}
|
||||
|
||||
var BadMetadata = errors.New("Corrupted metadata.")
|
|
@ -1,269 +0,0 @@
|
|||
package s3
|
||||
|
||||
import (
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/helpers"
|
||||
"github.com/aws/aws-sdk-go/aws"
|
||||
"github.com/aws/aws-sdk-go/aws/awserr"
|
||||
"github.com/aws/aws-sdk-go/aws/session"
|
||||
"github.com/aws/aws-sdk-go/service/s3"
|
||||
"github.com/aws/aws-sdk-go/service/s3/s3manager"
|
||||
)
|
||||
|
||||
type S3Backend struct {
|
||||
bucket string
|
||||
svc *s3.S3
|
||||
}
|
||||
|
||||
func (b S3Backend) Delete(key string) error {
|
||||
_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (b S3Backend) Exists(key string) (bool, error) {
|
||||
_, err := b.svc.HeadObject(&s3.HeadObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
})
|
||||
return err == nil, err
|
||||
}
|
||||
|
||||
func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) {
|
||||
var result *s3.HeadObjectOutput
|
||||
result, err = b.svc.HeadObject(&s3.HeadObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
})
|
||||
if err != nil {
|
||||
if aerr, ok := err.(awserr.Error); ok {
|
||||
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
|
||||
err = backends.NotFoundErr
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
metadata, err = unmapMetadata(result.Metadata)
|
||||
return
|
||||
}
|
||||
|
||||
func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser, err error) {
|
||||
var result *s3.GetObjectOutput
|
||||
result, err = b.svc.GetObject(&s3.GetObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
})
|
||||
if err != nil {
|
||||
if aerr, ok := err.(awserr.Error); ok {
|
||||
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
|
||||
err = backends.NotFoundErr
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
metadata, err = unmapMetadata(result.Metadata)
|
||||
r = result.Body
|
||||
return
|
||||
}
|
||||
|
||||
func (b S3Backend) ServeFile(key string, w http.ResponseWriter, r *http.Request) (err error) {
|
||||
var result *s3.GetObjectOutput
|
||||
|
||||
if r.Header.Get("Range") != "" {
|
||||
result, err = b.svc.GetObject(&s3.GetObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
Range: aws.String(r.Header.Get("Range")),
|
||||
})
|
||||
|
||||
w.WriteHeader(206)
|
||||
w.Header().Set("Content-Range", *result.ContentRange)
|
||||
w.Header().Set("Content-Length", strconv.FormatInt(*result.ContentLength, 10))
|
||||
w.Header().Set("Accept-Ranges", "bytes")
|
||||
|
||||
} else {
|
||||
result, err = b.svc.GetObject(&s3.GetObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
if aerr, ok := err.(awserr.Error); ok {
|
||||
if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
|
||||
err = backends.NotFoundErr
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
_, err = io.Copy(w, result.Body)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func mapMetadata(m backends.Metadata) map[string]*string {
|
||||
return map[string]*string{
|
||||
"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
|
||||
"Deletekey": aws.String(m.DeleteKey),
|
||||
"Size": aws.String(strconv.FormatInt(m.Size, 10)),
|
||||
"Mimetype": aws.String(m.Mimetype),
|
||||
"Sha256sum": aws.String(m.Sha256sum),
|
||||
"AccessKey": aws.String(m.AccessKey),
|
||||
}
|
||||
}
|
||||
|
||||
func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) {
|
||||
expiry, err := strconv.ParseInt(aws.StringValue(input["Expiry"]), 10, 64)
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
m.Expiry = time.Unix(expiry, 0)
|
||||
|
||||
m.Size, err = strconv.ParseInt(aws.StringValue(input["Size"]), 10, 64)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
m.DeleteKey = aws.StringValue(input["Deletekey"])
|
||||
if m.DeleteKey == "" {
|
||||
m.DeleteKey = aws.StringValue(input["Delete_key"])
|
||||
}
|
||||
|
||||
m.Mimetype = aws.StringValue(input["Mimetype"])
|
||||
m.Sha256sum = aws.StringValue(input["Sha256sum"])
|
||||
|
||||
if key, ok := input["AccessKey"]; ok {
|
||||
m.AccessKey = aws.StringValue(key)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (m backends.Metadata, err error) {
|
||||
tmpDst, err := ioutil.TempFile("", "linx-server-upload")
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
defer tmpDst.Close()
|
||||
defer os.Remove(tmpDst.Name())
|
||||
|
||||
bytes, err := io.Copy(tmpDst, r)
|
||||
if bytes == 0 {
|
||||
return m, backends.FileEmptyError
|
||||
} else if err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
_, err = tmpDst.Seek(0, 0)
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
m, err = helpers.GenerateMetadata(tmpDst)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
m.Expiry = expiry
|
||||
m.DeleteKey = deleteKey
|
||||
m.AccessKey = accessKey
|
||||
// XXX: we may not be able to write this to AWS easily
|
||||
//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)
|
||||
|
||||
_, err = tmpDst.Seek(0, 0)
|
||||
if err != nil {
|
||||
return m, err
|
||||
}
|
||||
|
||||
uploader := s3manager.NewUploaderWithClient(b.svc)
|
||||
input := &s3manager.UploadInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
Body: tmpDst,
|
||||
Metadata: mapMetadata(m),
|
||||
}
|
||||
_, err = uploader.Upload(input)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (b S3Backend) PutMetadata(key string, m backends.Metadata) (err error) {
|
||||
_, err = b.svc.CopyObject(&s3.CopyObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
CopySource: aws.String("/" + b.bucket + "/" + key),
|
||||
Metadata: mapMetadata(m),
|
||||
MetadataDirective: aws.String("REPLACE"),
|
||||
})
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func (b S3Backend) Size(key string) (int64, error) {
|
||||
input := &s3.HeadObjectInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
Key: aws.String(key),
|
||||
}
|
||||
result, err := b.svc.HeadObject(input)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return *result.ContentLength, nil
|
||||
}
|
||||
|
||||
func (b S3Backend) List() ([]string, error) {
|
||||
var output []string
|
||||
input := &s3.ListObjectsInput{
|
||||
Bucket: aws.String(b.bucket),
|
||||
}
|
||||
|
||||
results, err := b.svc.ListObjects(input)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for _, object := range results.Contents {
|
||||
output = append(output, *object.Key)
|
||||
}
|
||||
|
||||
return output, nil
|
||||
}
|
||||
|
||||
func NewS3Backend(bucket string, region string, endpoint string, forcePathStyle bool) S3Backend {
|
||||
awsConfig := &aws.Config{}
|
||||
if region != "" {
|
||||
awsConfig.Region = aws.String(region)
|
||||
}
|
||||
if endpoint != "" {
|
||||
awsConfig.Endpoint = aws.String(endpoint)
|
||||
}
|
||||
if forcePathStyle == true {
|
||||
awsConfig.S3ForcePathStyle = aws.Bool(true)
|
||||
}
|
||||
|
||||
sess := session.Must(session.NewSession(awsConfig))
|
||||
svc := s3.New(sess)
|
||||
return S3Backend{bucket: bucket, svc: svc}
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
package backends
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
)
|
||||
|
||||
type StorageBackend interface {
|
||||
Delete(key string) error
|
||||
Exists(key string) (bool, error)
|
||||
Head(key string) (Metadata, error)
|
||||
Get(key string) (Metadata, io.ReadCloser, error)
|
||||
Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (Metadata, error)
|
||||
PutMetadata(key string, m Metadata) error
|
||||
ServeFile(key string, w http.ResponseWriter, r *http.Request) error
|
||||
Size(key string) (int64, error)
|
||||
}
|
||||
|
||||
type MetaStorageBackend interface {
|
||||
StorageBackend
|
||||
List() ([]string, error)
|
||||
}
|
||||
|
||||
var NotFoundErr = errors.New("File not found.")
|
||||
var FileEmptyError = errors.New("Empty file")
|
126
build.sh
126
build.sh
|
@ -1,76 +1,66 @@
|
|||
#!/bin/bash
|
||||
|
||||
function build_binary_rice {
|
||||
name="$1"
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
|
||||
rice append --exec "$name"osx-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
|
||||
rice append --exec "$name"freebsd-$arch
|
||||
done
|
||||
|
||||
for arch in arm amd64 386; do
|
||||
GOOS=netbsd GOARCH=$arch go build -o "$name"netbsd-$arch
|
||||
rice append --exec "$name"netbsd-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
|
||||
rice append --exec "$name"openbsd-$arch
|
||||
done
|
||||
|
||||
for arch in arm arm64 amd64 386; do
|
||||
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
|
||||
rice append --exec "$name"linux-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
|
||||
rice append --exec "$name"windows-$arch.exe
|
||||
done
|
||||
}
|
||||
|
||||
function build_binary {
|
||||
name="$1"
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=darwin GOARCH=$arch go build -o "$name"osx-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
|
||||
done
|
||||
|
||||
for arch in arm amd64 386; do
|
||||
GOOS=netbsd GOARCH=$arch go build -o "$name"netbsd-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
|
||||
done
|
||||
|
||||
for arch in arm arm64 amd64 386; do
|
||||
GOOS=linux GOARCH=$arch go build -o "$name"linux-$arch
|
||||
done
|
||||
|
||||
for arch in amd64 386; do
|
||||
GOOS=windows GOARCH=$arch go build -o "$name"windows-$arch.exe
|
||||
done
|
||||
}
|
||||
|
||||
version="$1"
|
||||
mkdir -p "binaries/""$version"
|
||||
mkdir -p "binairies/""$version"
|
||||
name="binairies/""$version""/linx-server-v""$version""_"
|
||||
|
||||
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
|
||||
rice append --exec "$name"osx-amd64
|
||||
|
||||
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
|
||||
rice append --exec "$name"osx-386
|
||||
|
||||
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
|
||||
rice append --exec "$name"freebsd-amd64
|
||||
|
||||
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
|
||||
rice append --exec "$name"freebsd-386
|
||||
|
||||
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
|
||||
rice append --exec "$name"openbsd-amd64
|
||||
|
||||
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
|
||||
rice append --exec "$name"openbsd-386
|
||||
|
||||
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
|
||||
rice append --exec "$name"linux-arm
|
||||
|
||||
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
|
||||
rice append --exec "$name"linux-amd64
|
||||
|
||||
GOOS=linux GOARCH=386 go build -o "$name"linux-386
|
||||
rice append --exec "$name"linux-386
|
||||
|
||||
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
|
||||
rice append --exec "$name"windows-amd64.exe
|
||||
|
||||
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
|
||||
rice append --exec "$name"windows-386.exe
|
||||
|
||||
build_binary_rice "binaries/""$version""/linx-server-v""$version""_"
|
||||
|
||||
cd linx-genkey
|
||||
build_binary "../binaries/""$version""/linx-genkey-v""$version""_"
|
||||
cd ..
|
||||
name="../binairies/""$version""/linx-genkey-v""$version""_"
|
||||
|
||||
GOOS=darwin GOARCH=amd64 go build -o "$name"osx-amd64
|
||||
|
||||
GOOS=darwin GOARCH=386 go build -o "$name"osx-386
|
||||
|
||||
GOOS=freebsd GOARCH=amd64 go build -o "$name"freebsd-amd64
|
||||
|
||||
GOOS=freebsd GOARCH=386 go build -o "$name"freebsd-386
|
||||
|
||||
GOOS=openbsd GOARCH=amd64 go build -o "$name"openbsd-amd64
|
||||
|
||||
GOOS=openbsd GOARCH=386 go build -o "$name"openbsd-386
|
||||
|
||||
GOOS=linux GOARCH=arm go build -o "$name"linux-arm
|
||||
|
||||
GOOS=linux GOARCH=amd64 go build -o "$name"linux-amd64
|
||||
|
||||
GOOS=linux GOARCH=386 go build -o "$name"linux-386
|
||||
|
||||
GOOS=windows GOARCH=amd64 go build -o "$name"windows-amd64.exe
|
||||
|
||||
GOOS=windows GOARCH=386 go build -o "$name"windows-386.exe
|
||||
|
||||
cd linx-cleanup
|
||||
build_binary "../binaries/""$version""/linx-cleanup-v""$version""_"
|
||||
cd ..
|
||||
|
|
|
@ -1,42 +0,0 @@
|
|||
package cleanup
|
||||
|
||||
import (
|
||||
"log"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends/localfs"
|
||||
"github.com/andreimarcu/linx-server/expiry"
|
||||
)
|
||||
|
||||
func Cleanup(filesDir string, metaDir string, noLogs bool) {
|
||||
fileBackend := localfs.NewLocalfsBackend(metaDir, filesDir)
|
||||
|
||||
files, err := fileBackend.List()
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
for _, filename := range files {
|
||||
metadata, err := fileBackend.Head(filename)
|
||||
if err != nil {
|
||||
if !noLogs {
|
||||
log.Printf("Failed to find metadata for %s", filename)
|
||||
}
|
||||
}
|
||||
|
||||
if expiry.IsTsExpired(metadata.Expiry) {
|
||||
if !noLogs {
|
||||
log.Printf("Delete %s", filename)
|
||||
}
|
||||
fileBackend.Delete(filename)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func PeriodicCleanup(minutes time.Duration, filesDir string, metaDir string, noLogs bool) {
|
||||
c := time.Tick(minutes)
|
||||
for range c {
|
||||
Cleanup(filesDir, metaDir, noLogs)
|
||||
}
|
||||
|
||||
}
|
11
csp.go
11
csp.go
|
@ -6,7 +6,6 @@ import (
|
|||
|
||||
const (
|
||||
cspHeader = "Content-Security-Policy"
|
||||
rpHeader = "Referrer-Policy"
|
||||
frameOptionsHeader = "X-Frame-Options"
|
||||
)
|
||||
|
||||
|
@ -16,9 +15,8 @@ type csp struct {
|
|||
}
|
||||
|
||||
type CSPOptions struct {
|
||||
policy string
|
||||
referrerPolicy string
|
||||
frame string
|
||||
policy string
|
||||
frame string
|
||||
}
|
||||
|
||||
func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
|
@ -27,11 +25,6 @@ func (c csp) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
|||
w.Header().Add(cspHeader, c.opts.policy)
|
||||
}
|
||||
|
||||
// only add a Referrer Policy if one is not already set
|
||||
if existing := w.Header().Get(rpHeader); existing == "" {
|
||||
w.Header().Add(rpHeader, c.opts.referrerPolicy)
|
||||
}
|
||||
|
||||
w.Header().Set(frameOptionsHeader, c.opts.frame)
|
||||
|
||||
c.h.ServeHTTP(w, r)
|
||||
|
|
12
csp_test.go
12
csp_test.go
|
@ -12,7 +12,6 @@ import (
|
|||
|
||||
var testCSPHeaders = map[string]string{
|
||||
"Content-Security-Policy": "default-src 'none'; style-src 'self';",
|
||||
"Referrer-Policy": "strict-origin-when-cross-origin",
|
||||
"X-Frame-Options": "SAMEORIGIN",
|
||||
}
|
||||
|
||||
|
@ -23,10 +22,8 @@ func TestContentSecurityPolicy(t *testing.T) {
|
|||
Config.maxSize = 1024 * 1024 * 1024
|
||||
Config.noLogs = true
|
||||
Config.siteName = "linx"
|
||||
Config.selifPath = "selif"
|
||||
Config.contentSecurityPolicy = testCSPHeaders["Content-Security-Policy"]
|
||||
Config.referrerPolicy = testCSPHeaders["Referrer-Policy"]
|
||||
Config.xFrameOptions = testCSPHeaders["X-Frame-Options"]
|
||||
Config.contentSecurityPolicy = "default-src 'none'; style-src 'self';"
|
||||
Config.xFrameOptions = "SAMEORIGIN"
|
||||
mux := setup()
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
|
@ -37,9 +34,8 @@ func TestContentSecurityPolicy(t *testing.T) {
|
|||
}
|
||||
|
||||
goji.Use(ContentSecurityPolicy(CSPOptions{
|
||||
policy: testCSPHeaders["Content-Security-Policy"],
|
||||
referrerPolicy: testCSPHeaders["Referrer-Policy"],
|
||||
frame: testCSPHeaders["X-Frame-Options"],
|
||||
policy: testCSPHeaders["Content-Security-Policy"],
|
||||
frame: testCSPHeaders["X-Frame-Options"],
|
||||
}))
|
||||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
|
|
@ -1,40 +0,0 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/microcosm-cc/bluemonday"
|
||||
"github.com/russross/blackfriday"
|
||||
)
|
||||
|
||||
func initializeCustomPages(customPagesDir string) {
|
||||
files, err := ioutil.ReadDir(customPagesDir)
|
||||
if err != nil {
|
||||
log.Fatal("Error reading the custom pages directory: ", err)
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
fileName := file.Name()
|
||||
|
||||
if len(fileName) <= 3 {
|
||||
continue
|
||||
}
|
||||
|
||||
if strings.EqualFold(string(fileName[len(fileName)-3:len(fileName)]), ".md") {
|
||||
contents, err := ioutil.ReadFile(path.Join(customPagesDir, fileName))
|
||||
if err != nil {
|
||||
log.Fatalf("Error reading file %s", fileName)
|
||||
}
|
||||
|
||||
unsafe := blackfriday.MarkdownCommon(contents)
|
||||
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
|
||||
|
||||
fileName := fileName[0 : len(fileName)-3]
|
||||
customPages[fileName] = string(html)
|
||||
customPagesNames[fileName] = strings.ReplaceAll(fileName, "_", " ")
|
||||
}
|
||||
}
|
||||
}
|
19 delete.go

@ -3,8 +3,8 @@ package main
import (
"fmt"
"net/http"
"os"

"github.com/andreimarcu/linx-server/backends"
"github.com/zenazn/goji/web"
)

@ -13,19 +13,24 @@ func deleteHandler(c web.C, w http.ResponseWriter, r *http.Request) {

filename := c.URLParams["name"]

// Ensure that file exists and delete key is correct
metadata, err := storageBackend.Head(filename)
if err == backends.NotFoundErr {
// Ensure requested file actually exists
if _, readErr := fileBackend.Exists(filename); os.IsNotExist(readErr) {
notFoundHandler(c, w, r) // 404 - file doesn't exist
return
} else if err != nil {
}

// Ensure delete key is correct
metadata, err := metadataRead(filename)
if err != nil {
unauthorizedHandler(c, w, r) // 401 - no metadata available
return
}

if metadata.DeleteKey == requestKey {
err := storageBackend.Delete(filename)
if err != nil {
fileDelErr := fileBackend.Delete(filename)
metaDelErr := metaBackend.Delete(filename)

if (fileDelErr != nil) || (metaDelErr != nil) {
oopsHandler(c, w, r, RespPLAIN, "Could not delete")
return
}

75
display.go
75
display.go
|
@ -2,15 +2,12 @@ package main
|
|||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/expiry"
|
||||
"github.com/dustin/go-humanize"
|
||||
"github.com/flosch/pongo2"
|
||||
"github.com/microcosm-cc/bluemonday"
|
||||
|
@ -20,9 +17,22 @@ import (
|
|||
|
||||
const maxDisplayFileSizeBytes = 1024 * 512
|
||||
|
||||
func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileName string, metadata backends.Metadata) {
|
||||
func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
fileName := c.URLParams["name"]
|
||||
|
||||
err := checkFile(fileName)
|
||||
if err == NotFoundErr {
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
metadata, err := metadataRead(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
|
||||
return
|
||||
}
|
||||
var expiryHuman string
|
||||
if metadata.Expiry != expiry.NeverExpire {
|
||||
if metadata.Expiry != neverExpire {
|
||||
expiryHuman = humanize.RelTime(time.Now(), metadata.Expiry, "", "")
|
||||
}
|
||||
sizeHuman := humanize.Bytes(uint64(metadata.Size))
|
||||
|
@ -33,12 +43,11 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileNam
|
|||
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
js, _ := json.Marshal(map[string]string{
|
||||
"filename": fileName,
|
||||
"direct_url": getSiteURL(r) + Config.selifPath + fileName,
|
||||
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
|
||||
"size": strconv.FormatInt(metadata.Size, 10),
|
||||
"mimetype": metadata.Mimetype,
|
||||
"sha256sum": metadata.Sha256sum,
|
||||
"filename": fileName,
|
||||
"expiry": strconv.FormatInt(metadata.Expiry.Unix(), 10),
|
||||
"size": strconv.FormatInt(metadata.Size, 10),
|
||||
"mimetype": metadata.Mimetype,
|
||||
"sha256sum": metadata.Sha256sum,
|
||||
})
|
||||
w.Write(js)
|
||||
return
|
||||
|
@ -59,13 +68,8 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileNam
|
|||
tpl = Templates["display/pdf.html"]
|
||||
|
||||
} else if extension == "story" {
|
||||
metadata, reader, err := storageBackend.Get(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, err.Error())
|
||||
}
|
||||
|
||||
if metadata.Size < maxDisplayFileSizeBytes {
|
||||
bytes, err := ioutil.ReadAll(reader)
|
||||
bytes, err := fileBackend.Get(fileName)
|
||||
if err == nil {
|
||||
extra["contents"] = string(bytes)
|
||||
lines = strings.Split(extra["contents"], "\n")
|
||||
|
@ -74,13 +78,8 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileNam
|
|||
}
|
||||
|
||||
} else if extension == "md" {
|
||||
metadata, reader, err := storageBackend.Get(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, err.Error())
|
||||
}
|
||||
|
||||
if metadata.Size < maxDisplayFileSizeBytes {
|
||||
bytes, err := ioutil.ReadAll(reader)
|
||||
bytes, err := fileBackend.Get(fileName)
|
||||
if err == nil {
|
||||
unsafe := blackfriday.MarkdownCommon(bytes)
|
||||
html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)
|
||||
|
@ -91,16 +90,11 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileNam
|
|||
}
|
||||
|
||||
} else if strings.HasPrefix(metadata.Mimetype, "text/") || supportedBinExtension(extension) {
|
||||
metadata, reader, err := storageBackend.Get(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, err.Error())
|
||||
}
|
||||
|
||||
if metadata.Size < maxDisplayFileSizeBytes {
|
||||
bytes, err := ioutil.ReadAll(reader)
|
||||
bytes, err := fileBackend.Get(fileName)
|
||||
if err == nil {
|
||||
extra["extension"] = extension
|
||||
extra["lang_hl"] = extensionToHlLang(extension)
|
||||
extra["lang_hl"], extra["lang_ace"] = extensionToHlAndAceLangs(extension)
|
||||
extra["contents"] = string(bytes)
|
||||
tpl = Templates["display/bin.html"]
|
||||
}
|
||||
|
@ -112,17 +106,16 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileNam
|
|||
tpl = Templates["display/file.html"]
|
||||
}
|
||||
|
||||
err := renderTemplate(tpl, pongo2.Context{
|
||||
"mime": metadata.Mimetype,
|
||||
"filename": fileName,
|
||||
"size": sizeHuman,
|
||||
"expiry": expiryHuman,
|
||||
"expirylist": listExpirationTimes(),
|
||||
"extra": extra,
|
||||
"forcerandom": Config.forceRandomFilename,
|
||||
"lines": lines,
|
||||
"files": metadata.ArchiveFiles,
|
||||
"siteurl": strings.TrimSuffix(getSiteURL(r), "/"),
|
||||
err = renderTemplate(tpl, pongo2.Context{
|
||||
"mime": metadata.Mimetype,
|
||||
"filename": fileName,
|
||||
"size": sizeHuman,
|
||||
"expiry": expiryHuman,
|
||||
"extra": extra,
|
||||
"lines": lines,
|
||||
"files": metadata.ArchiveFiles,
|
||||
"shorturlEnabled": Config.googleShorterAPIKey != "",
|
||||
"shorturl": metadata.ShortURL,
|
||||
}, r, w)
|
||||
|
||||
if err != nil {
|
||||
|
|
25 expiry.go

@ -3,7 +3,6 @@ package main
import (
"time"

"github.com/andreimarcu/linx-server/expiry"
"github.com/dustin/go-humanize"
)

@ -22,14 +21,22 @@ type ExpirationTime struct {
Human string
}

var neverExpire = time.Unix(0, 0)

// Determine if a file with expiry set to "ts" has expired yet
func isTsExpired(ts time.Time) bool {
now := time.Now()
return ts != neverExpire && now.After(ts)
}

// Determine if the given filename is expired
func isFileExpired(filename string) (bool, error) {
metadata, err := storageBackend.Head(filename)
metadata, err := metadataRead(filename)
if err != nil {
return false, err
}

return expiry.IsTsExpired(metadata.Expiry), nil
return isTsExpired(metadata.Expiry), nil
}

// Return a list of expiration times and their humanized versions

@ -38,16 +45,16 @@ func listExpirationTimes() []ExpirationTime {
actualExpiryInList := false
var expiryList []ExpirationTime

for _, expiryEntry := range defaultExpiryList {
if Config.maxExpiry == 0 || expiryEntry <= Config.maxExpiry {
if expiryEntry == Config.maxExpiry {
for _, expiry := range defaultExpiryList {
if Config.maxExpiry == 0 || expiry <= Config.maxExpiry {
if expiry == Config.maxExpiry {
actualExpiryInList = true
}

duration := time.Duration(expiryEntry) * time.Second
duration := time.Duration(expiry) * time.Second
expiryList = append(expiryList, ExpirationTime{
Seconds: expiryEntry,
Human: humanize.RelTime(epoch, epoch.Add(duration), "", ""),
expiry,
humanize.RelTime(epoch, epoch.Add(duration), "", ""),
})
}
}

@ -1,13 +0,0 @@
package expiry

import (
"time"
)

var NeverExpire = time.Unix(0, 0)

// Determine if a file with expiry set to "ts" has expired yet
func IsTsExpired(ts time.Time) bool {
now := time.Now()
return ts != NeverExpire && now.After(ts)
}

67
fileserve.go
67
fileserve.go
|
@ -1,41 +1,25 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/expiry"
|
||||
"github.com/andreimarcu/linx-server/httputil"
|
||||
"github.com/zenazn/goji/web"
|
||||
)
|
||||
|
||||
func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
fileName := c.URLParams["name"]
|
||||
|
||||
metadata, err := checkFile(fileName)
|
||||
if err == backends.NotFoundErr {
|
||||
err := checkFile(fileName)
|
||||
if err == NotFoundErr {
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
} else if err != nil {
|
||||
} else if err == BadMetadata {
|
||||
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
|
||||
return
|
||||
}
|
||||
|
||||
if src, err := checkAccessKey(r, &metadata); err != nil {
|
||||
// remove invalid cookie
|
||||
if src == accessKeySourceCookie {
|
||||
setAccessKeyCookies(w, getSiteURL(r), fileName, "", time.Unix(0, 0))
|
||||
}
|
||||
unauthorizedHandler(c, w, r)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
if !Config.allowHotlink {
|
||||
referer := r.Header.Get("Referer")
|
||||
u, _ := url.Parse(referer)
|
||||
|
@ -47,26 +31,8 @@ func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
w.Header().Set("Content-Security-Policy", Config.fileContentSecurityPolicy)
|
||||
w.Header().Set("Referrer-Policy", Config.fileReferrerPolicy)
|
||||
|
||||
w.Header().Set("Content-Type", metadata.Mimetype)
|
||||
w.Header().Set("Content-Length", strconv.FormatInt(metadata.Size, 10))
|
||||
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", metadata.Sha256sum))
|
||||
w.Header().Set("Cache-Control", "public, no-cache")
|
||||
|
||||
modtime := time.Unix(0, 0)
|
||||
if done := httputil.CheckPreconditions(w, r, modtime); done == true {
|
||||
return
|
||||
}
|
||||
|
||||
if r.Method != "HEAD" {
|
||||
|
||||
storageBackend.ServeFile(fileName, w, r)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespAUTO, err.Error())
|
||||
return
|
||||
}
|
||||
}
|
||||
fileBackend.ServeFile(fileName, w, r)
|
||||
}
|
||||
|
||||
func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
|
@ -86,24 +52,29 @@ func staticHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Etag", fmt.Sprintf("\"%s\"", timeStartedStr))
|
||||
w.Header().Set("Cache-Control", "public, max-age=86400")
|
||||
w.Header().Set("Etag", timeStartedStr)
|
||||
w.Header().Set("Cache-Control", "max-age=86400")
|
||||
http.ServeContent(w, r, filePath, timeStarted, file)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
func checkFile(filename string) (metadata backends.Metadata, err error) {
|
||||
metadata, err = storageBackend.Head(filename)
|
||||
func checkFile(filename string) error {
|
||||
_, err := fileBackend.Exists(filename)
|
||||
if err != nil {
|
||||
return
|
||||
return NotFoundErr
|
||||
}
|
||||
|
||||
if expiry.IsTsExpired(metadata.Expiry) {
|
||||
storageBackend.Delete(filename)
|
||||
err = backends.NotFoundErr
|
||||
return
|
||||
expired, err := isFileExpired(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return
|
||||
if expired {
|
||||
fileBackend.Delete(filename)
|
||||
metaBackend.Delete(filename)
|
||||
return NotFoundErr
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
|
19 go.mod

@ -1,19 +0,0 @@
module github.com/andreimarcu/linx-server

go 1.14

require (
github.com/GeertJohan/go.rice v1.0.0
github.com/aws/aws-sdk-go v1.29.19
github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5
github.com/dustin/go-humanize v1.0.0
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4
github.com/gabriel-vasile/mimetype v1.1.1
github.com/microcosm-cc/bluemonday v1.0.2
github.com/minio/sha256-simd v0.1.1
github.com/russross/blackfriday v1.5.1
github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de
github.com/zeebo/bencode v1.0.0
github.com/zenazn/goji v0.9.0
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073
)

76
go.sum
76
go.sum
|
@ -1,76 +0,0 @@
|
|||
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
|
||||
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
|
||||
github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
|
||||
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
|
||||
github.com/aws/aws-sdk-go v1.29.19 h1:+jifYixffn6kzWygtGWFWQMv0tDGyISZHNwugF9V2sE=
|
||||
github.com/aws/aws-sdk-go v1.29.19/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
|
||||
github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
|
||||
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5 h1:RAV05c0xOkJ3dZGS0JFybxFKZ2WMLabgx3uXnd7rpGs=
|
||||
github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5/go.mod h1:GgB8SF9nRG+GqaDtLcwJZsQFhcogVCJ79j4EdT0c2V4=
|
||||
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4 h1:GY1+t5Dr9OKADM64SYnQjw/w99HMYvQ0A8/JoUkxVmc=
|
||||
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA=
|
||||
github.com/gabriel-vasile/mimetype v1.1.1 h1:qbN9MPuRf3bstHu9zkI9jDWNfH//9+9kHxr9oRBBBOA=
|
||||
github.com/gabriel-vasile/mimetype v1.1.1/go.mod h1:6CDPel/o/3/s4+bp6kIbsWATq8pmgOisOPG40CJa6To=
|
||||
github.com/go-check/check v0.0.0-20180628173108-788fd7840127 h1:0gkP6mzaMqkmpcJYCFOLkIBwI7xFExG03bbkOkCvUPI=
|
||||
github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5 h1:rhqTjzJlm7EbkELJDKMTU7udov+Se0xZkWmugr6zGok=
|
||||
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
|
||||
github.com/juju/loggo v0.0.0-20180524022052-584905176618 h1:MK144iBQF9hTSwBW/9eJm034bVoG30IshVm688T2hi8=
|
||||
github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
|
||||
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073 h1:WQM1NildKThwdP7qWrNAFGzp4ijNLw8RlgENkaI4MJs=
|
||||
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
|
||||
github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s=
|
||||
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
|
||||
github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
|
||||
github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM=
|
||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday v1.5.1 h1:B8ZN6pD4PVofmlDCDUdELeYrbsVIDM/bpjW3v3zgcRc=
|
||||
github.com/russross/blackfriday v1.5.1/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
||||
github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de h1:fkw+7JkxF3U1GzQoX9h69Wvtvxajo5Rbzy6+YMMzPIg=
|
||||
github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de/go.mod h1:irMhzlTz8+fVFj6CH2AN2i+WI5S6wWFtK3MBCIxIpyI=
|
||||
github.com/zeebo/bencode v1.0.0 h1:zgop0Wu1nu4IexAZeCZ5qbsjU4O1vMrfCrVgUjbHVuA=
|
||||
github.com/zeebo/bencode v1.0.0/go.mod h1:Ct7CkrWIQuLWAy9M3atFHYq4kG9Ao/SsY5cdtCXmp9Y=
|
||||
github.com/zenazn/goji v0.9.0 h1:RSQQAbXGArQ0dIDEq+PI6WqN6if+5KHu6x2Cx/GXLTQ=
|
||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce h1:xcEWjVhvbDy+nHP67nPDDpbYrY+ILlfndk4bRioVHaU=
|
||||
gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
@ -1,70 +0,0 @@
|
|||
package helpers
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"archive/zip"
|
||||
"compress/bzip2"
|
||||
"compress/gzip"
|
||||
"io"
|
||||
"sort"
|
||||
)
|
||||
|
||||
type ReadSeekerAt interface {
|
||||
io.Reader
|
||||
io.Seeker
|
||||
io.ReaderAt
|
||||
}
|
||||
|
||||
func ListArchiveFiles(mimetype string, size int64, r ReadSeekerAt) (files []string, err error) {
|
||||
if mimetype == "application/x-tar" {
|
||||
tReadr := tar.NewReader(r)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
files = append(files, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(files)
|
||||
} else if mimetype == "application/x-gzip" {
|
||||
gzf, err := gzip.NewReader(r)
|
||||
if err == nil {
|
||||
tReadr := tar.NewReader(gzf)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
files = append(files, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(files)
|
||||
}
|
||||
} else if mimetype == "application/x-bzip" {
|
||||
bzf := bzip2.NewReader(r)
|
||||
tReadr := tar.NewReader(bzf)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
files = append(files, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(files)
|
||||
} else if mimetype == "application/zip" {
|
||||
zf, err := zip.NewReader(r, size)
|
||||
if err == nil {
|
||||
for _, f := range zf.File {
|
||||
files = append(files, f.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(files)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
|
@ -1,74 +0,0 @@
|
|||
package helpers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/hex"
|
||||
"io"
|
||||
"unicode"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/gabriel-vasile/mimetype"
|
||||
"github.com/minio/sha256-simd"
|
||||
)
|
||||
|
||||
func GenerateMetadata(r io.Reader) (m backends.Metadata, err error) {
|
||||
// Since we don't have the ability to seek within a file, we can use a
|
||||
// Buffer in combination with a TeeReader to keep a copy of the bytes
|
||||
// we read when detecting the file type. These bytes are still needed
|
||||
// to hash the file and determine its size and cannot be discarded.
|
||||
var buf bytes.Buffer
|
||||
teeReader := io.TeeReader(r, &buf)
|
||||
|
||||
// Get first 512 bytes for mimetype detection
|
||||
header := make([]byte, 512)
|
||||
headerlen, err := teeReader.Read(header)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// Create a Hash and a MultiReader that includes the Buffer we created
|
||||
// above along with the original Reader, which will have the rest of
|
||||
// the file.
|
||||
hasher := sha256.New()
|
||||
multiReader := io.MultiReader(&buf, r)
|
||||
|
||||
// Copy everything into the Hash, then use the number of bytes written
|
||||
// as the file size.
|
||||
var readLen int64
|
||||
readLen, err = io.Copy(hasher, multiReader)
|
||||
if err != nil {
|
||||
return
|
||||
} else {
|
||||
m.Size += readLen
|
||||
}
|
||||
|
||||
// Get the hex-encoded string version of the Hash checksum
|
||||
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
|
||||
|
||||
// Use the bytes we extracted earlier and attempt to determine the file
|
||||
// type
|
||||
kind := mimetype.Detect(header[:headerlen])
|
||||
m.Mimetype = kind.String()
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func printable(data []byte) bool {
|
||||
for i, b := range data {
|
||||
r := rune(b)
|
||||
|
||||
// A null terminator that's not at the beginning of the file
|
||||
if r == 0 && i == 0 {
|
||||
return false
|
||||
} else if r == 0 && i < 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
if r > unicode.MaxASCII {
|
||||
return false
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
|
@ -1,73 +0,0 @@
|
|||
package helpers
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"strings"
|
||||
"testing"
|
||||
"unicode/utf16"
|
||||
)
|
||||
|
||||
func TestGenerateMetadata(t *testing.T) {
|
||||
r := strings.NewReader("This is my test content")
|
||||
m, err := GenerateMetadata(r)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
expectedSha256sum := "966152d20a77e739716a625373ee15af16e8f4aec631a329a27da41c204b0171"
|
||||
if m.Sha256sum != expectedSha256sum {
|
||||
t.Fatalf("Sha256sum was %q instead of expected value of %q", m.Sha256sum, expectedSha256sum)
|
||||
}
|
||||
|
||||
expectedMimetype := "text/plain; charset=utf-8"
|
||||
if m.Mimetype != expectedMimetype {
|
||||
t.Fatalf("Mimetype was %q instead of expected value of %q", m.Mimetype, expectedMimetype)
|
||||
}
|
||||
|
||||
expectedSize := int64(23)
|
||||
if m.Size != expectedSize {
|
||||
t.Fatalf("Size was %d instead of expected value of %d", m.Size, expectedSize)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTextCharsets(t *testing.T) {
|
||||
// verify that different text encodings are detected and passed through
|
||||
orig := "This is a text string"
|
||||
utf16 := utf16.Encode([]rune(orig))
|
||||
utf16LE := make([]byte, len(utf16)*2+2)
|
||||
utf16BE := make([]byte, len(utf16)*2+2)
|
||||
utf8 := []byte(orig)
|
||||
utf16LE[0] = 0xff
|
||||
utf16LE[1] = 0xfe
|
||||
utf16BE[0] = 0xfe
|
||||
utf16BE[1] = 0xff
|
||||
for i := 0; i < len(utf16); i++ {
|
||||
lsb := utf16[i] & 0xff
|
||||
msb := utf16[i] >> 8
|
||||
utf16LE[i*2+2] = byte(lsb)
|
||||
utf16LE[i*2+3] = byte(msb)
|
||||
utf16BE[i*2+2] = byte(msb)
|
||||
utf16BE[i*2+3] = byte(lsb)
|
||||
}
|
||||
|
||||
testcases := []struct {
|
||||
data []byte
|
||||
extension string
|
||||
mimetype string
|
||||
}{
|
||||
{mimetype: "text/plain; charset=utf-8", data: utf8},
|
||||
{mimetype: "text/plain; charset=utf-16le", data: utf16LE},
|
||||
{mimetype: "text/plain; charset=utf-16be", data: utf16BE},
|
||||
}
|
||||
|
||||
for i, testcase := range testcases {
|
||||
r := bytes.NewReader(testcase.data)
|
||||
m, err := GenerateMetadata(r)
|
||||
if err != nil {
|
||||
t.Fatalf("[%d] unexpected error return %v\n", i, err)
|
||||
}
|
||||
if m.Mimetype != testcase.mimetype {
|
||||
t.Errorf("[%d] Expected mimetype '%s', got mimetype '%s'\n", i, testcase.mimetype, m.Mimetype)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,27 +0,0 @@
|
|||
Copyright (c) 2009 The Go Authors. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
* Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
* Redistributions in binary form must reproduce the above
|
||||
copyright notice, this list of conditions and the following disclaimer
|
||||
in the documentation and/or other materials provided with the
|
||||
distribution.
|
||||
* Neither the name of Google Inc. nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
@ -1,218 +0,0 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// HTTP file system request handler
|
||||
|
||||
package httputil
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/textproto"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
// scanETag determines if a syntactically valid ETag is present at s. If so,
|
||||
// the ETag and remaining text after consuming ETag is returned. Otherwise,
|
||||
// it returns "", "".
|
||||
func scanETag(s string) (etag string, remain string) {
|
||||
s = textproto.TrimString(s)
|
||||
start := 0
|
||||
if strings.HasPrefix(s, "W/") {
|
||||
start = 2
|
||||
}
|
||||
if len(s[start:]) < 2 || s[start] != '"' {
|
||||
return "", ""
|
||||
}
|
||||
// ETag is either W/"text" or "text".
|
||||
// See RFC 7232 2.3.
|
||||
for i := start + 1; i < len(s); i++ {
|
||||
c := s[i]
|
||||
switch {
|
||||
// Character values allowed in ETags.
|
||||
case c == 0x21 || c >= 0x23 && c <= 0x7E || c >= 0x80:
|
||||
case c == '"':
|
||||
return s[:i+1], s[i+1:]
|
||||
default:
|
||||
return "", ""
|
||||
}
|
||||
}
|
||||
return "", ""
|
||||
}
|
||||
|
||||
// etagStrongMatch reports whether a and b match using strong ETag comparison.
|
||||
// Assumes a and b are valid ETags.
|
||||
func etagStrongMatch(a, b string) bool {
|
||||
return a == b && a != "" && a[0] == '"'
|
||||
}
|
||||
|
||||
// etagWeakMatch reports whether a and b match using weak ETag comparison.
|
||||
// Assumes a and b are valid ETags.
|
||||
func etagWeakMatch(a, b string) bool {
|
||||
return strings.TrimPrefix(a, "W/") == strings.TrimPrefix(b, "W/")
|
||||
}
|
||||
|
||||
// condResult is the result of an HTTP request precondition check.
|
||||
// See https://tools.ietf.org/html/rfc7232 section 3.
|
||||
type condResult int
|
||||
|
||||
const (
|
||||
condNone condResult = iota
|
||||
condTrue
|
||||
condFalse
|
||||
)
|
||||
|
||||
func checkIfMatch(w http.ResponseWriter, r *http.Request) condResult {
|
||||
im := r.Header.Get("If-Match")
|
||||
if im == "" {
|
||||
return condNone
|
||||
}
|
||||
for {
|
||||
im = textproto.TrimString(im)
|
||||
if len(im) == 0 {
|
||||
break
|
||||
}
|
||||
if im[0] == ',' {
|
||||
im = im[1:]
|
||||
continue
|
||||
}
|
||||
if im[0] == '*' {
|
||||
return condTrue
|
||||
}
|
||||
etag, remain := scanETag(im)
|
||||
if etag == "" {
|
||||
break
|
||||
}
|
||||
if etagStrongMatch(etag, w.Header().Get("Etag")) {
|
||||
return condTrue
|
||||
}
|
||||
im = remain
|
||||
}
|
||||
|
||||
return condFalse
|
||||
}
|
||||
|
||||
func checkIfUnmodifiedSince(r *http.Request, modtime time.Time) condResult {
|
||||
ius := r.Header.Get("If-Unmodified-Since")
|
||||
if ius == "" || isZeroTime(modtime) {
|
||||
return condNone
|
||||
}
|
||||
if t, err := http.ParseTime(ius); err == nil {
|
||||
// The Date-Modified header truncates sub-second precision, so
|
||||
// use mtime < t+1s instead of mtime <= t to check for unmodified.
|
||||
if modtime.Before(t.Add(1 * time.Second)) {
|
||||
return condTrue
|
||||
}
|
||||
return condFalse
|
||||
}
|
||||
return condNone
|
||||
}
|
||||
|
||||
func checkIfNoneMatch(w http.ResponseWriter, r *http.Request) condResult {
|
||||
inm := r.Header.Get("If-None-Match")
|
||||
if inm == "" {
|
||||
return condNone
|
||||
}
|
||||
buf := inm
|
||||
for {
|
||||
buf = textproto.TrimString(buf)
|
||||
if len(buf) == 0 {
|
||||
break
|
||||
}
|
||||
if buf[0] == ',' {
|
||||
buf = buf[1:]
|
||||
}
|
||||
if buf[0] == '*' {
|
||||
return condFalse
|
||||
}
|
||||
etag, remain := scanETag(buf)
|
||||
if etag == "" {
|
||||
break
|
||||
}
|
||||
if etagWeakMatch(etag, w.Header().Get("Etag")) {
|
||||
return condFalse
|
||||
}
|
||||
buf = remain
|
||||
}
|
||||
return condTrue
|
||||
}
|
||||
|
||||
func checkIfModifiedSince(r *http.Request, modtime time.Time) condResult {
|
||||
if r.Method != "GET" && r.Method != "HEAD" {
|
||||
return condNone
|
||||
}
|
||||
ims := r.Header.Get("If-Modified-Since")
|
||||
if ims == "" || isZeroTime(modtime) {
|
||||
return condNone
|
||||
}
|
||||
t, err := http.ParseTime(ims)
|
||||
if err != nil {
|
||||
return condNone
|
||||
}
|
||||
// The Date-Modified header truncates sub-second precision, so
|
||||
// use mtime < t+1s instead of mtime <= t to check for unmodified.
|
||||
if modtime.Before(t.Add(1 * time.Second)) {
|
||||
return condFalse
|
||||
}
|
||||
return condTrue
|
||||
}
|
||||
|
||||
var unixEpochTime = time.Unix(0, 0)
|
||||
|
||||
// isZeroTime reports whether t is obviously unspecified (either zero or Unix()=0).
|
||||
func isZeroTime(t time.Time) bool {
|
||||
return t.IsZero() || t.Equal(unixEpochTime)
|
||||
}
|
||||
|
||||
func setLastModified(w http.ResponseWriter, modtime time.Time) {
|
||||
if !isZeroTime(modtime) {
|
||||
w.Header().Set("Last-Modified", modtime.UTC().Format(http.TimeFormat))
|
||||
}
|
||||
}
|
||||
|
||||
func writeNotModified(w http.ResponseWriter) {
|
||||
// RFC 7232 section 4.1:
|
||||
// a sender SHOULD NOT generate representation metadata other than the
|
||||
// above listed fields unless said metadata exists for the purpose of
|
||||
// guiding cache updates (e.g., Last-Modified might be useful if the
|
||||
// response does not have an ETag field).
|
||||
h := w.Header()
|
||||
delete(h, "Content-Type")
|
||||
delete(h, "Content-Length")
|
||||
if h.Get("Etag") != "" {
|
||||
delete(h, "Last-Modified")
|
||||
}
|
||||
w.WriteHeader(http.StatusNotModified)
|
||||
}
|
||||
|
||||
// CheckPreconditions evaluates request preconditions and reports whether a precondition
|
||||
// resulted in sending StatusNotModified or StatusPreconditionFailed.
|
||||
func CheckPreconditions(w http.ResponseWriter, r *http.Request, modtime time.Time) (done bool) {
|
||||
// This function carefully follows RFC 7232 section 6.
|
||||
ch := checkIfMatch(w, r)
|
||||
if ch == condNone {
|
||||
ch = checkIfUnmodifiedSince(r, modtime)
|
||||
}
|
||||
if ch == condFalse {
|
||||
w.WriteHeader(http.StatusPreconditionFailed)
|
||||
return true
|
||||
}
|
||||
switch checkIfNoneMatch(w, r) {
|
||||
case condFalse:
|
||||
if r.Method == "GET" || r.Method == "HEAD" {
|
||||
writeNotModified(w)
|
||||
return true
|
||||
} else {
|
||||
w.WriteHeader(http.StatusPreconditionFailed)
|
||||
return true
|
||||
}
|
||||
case condNone:
|
||||
if checkIfModifiedSince(r, modtime) == condFalse {
|
||||
writeNotModified(w)
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
|
@ -1,19 +0,0 @@

linx-cleanup
-------------------------
When files expire, access is disabled immediately, but the files and metadata
will persist on disk until someone attempts to access them.

If you'd like to automatically clean up files that have expired, you can use the
included `linx-cleanup` utility. To run it automatically, use a cronjob or similar
type of scheduled task.

You should be careful to ensure that only one instance of `linx-cleanup` runs at
a time to avoid unexpected behavior. It does not implement any type of locking.

|Option|Description
|------|-----------
| ```-filespath files/``` | Path to stored uploads (default is files/)
| ```-nologs``` | (optionally) disable deletion logs in stdout
| ```-metapath meta/``` | Path to stored information about uploads (default is meta/)
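
For illustration, a cron-based schedule that ties together the options in the table above might look like the following sketch. The install path `/usr/local/bin/linx-cleanup` and the `/srv/linx/` data directories are assumptions made for the example, not paths mandated by the project.

```
# Hypothetical crontab entry: run linx-cleanup once per hour against the
# assumed deployment paths; adjust them to match the actual installation.
0 * * * * /usr/local/bin/linx-cleanup -filespath /srv/linx/files/ -metapath /srv/linx/meta/ -nologs
```
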
@ -1,23 +0,0 @@
package main

import (
"flag"

"github.com/andreimarcu/linx-server/cleanup"
)

func main() {
var filesDir string
var metaDir string
var noLogs bool

flag.StringVar(&filesDir, "filespath", "files/",
"path to files directory")
flag.StringVar(&metaDir, "metapath", "meta/",
"path to metadata directory")
flag.BoolVar(&noLogs, "nologs", false,
"don't log deleted files")
flag.Parse()

cleanup.Cleanup(filesDir, metaDir, noLogs)
}

@ -1,12 +0,0 @@
[Unit]
Description=Self-hosted file/code/media sharing (expired files cleanup)
After=network.target

[Service]
User=linx
Group=linx
ExecStart=/usr/bin/linx-cleanup
WorkingDirectory=/srv/linx/

[Install]
WantedBy=multi-user.target

@ -1,8 +0,0 @@
[Unit]
Description=Run linx-cleanup every hour

[Timer]
OnUnitActiveSec=1h

[Install]
WantedBy=timers.target
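
If the bundled systemd units above are preferred over cron, they would typically be installed and enabled roughly as follows. This is a sketch assuming the usual `/etc/systemd/system/` location for locally added units, not project-supplied instructions.

```
# Hypothetical installation and activation of the bundled cleanup units
cp linx-cleanup.service linx-cleanup.timer /etc/systemd/system/
systemctl daemon-reload
systemctl enable --now linx-cleanup.timer
```
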
@ -1,12 +0,0 @@

bind = 127.0.0.1:8080
sitename = myLinx
siteurl = https://mylinx.example.org/
selifpath = s
maxsize = 4294967296
maxexpiry = 86400
allowhotlink = true
remoteuploads = true
nologs = true
force-random-filename = false
cleanup-every-minutes = 5
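
The sample configuration above is an ini-style file. Because the server parses its flags through iniflags, it can usually be pointed at such a file with the `-config` flag; the file location below is assumed for illustration.

```
# Hypothetical invocation reading the example configuration
linx-server -config /etc/linx-server.conf
```
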
@ -6,6 +6,3 @@ After=network.target
User=linx
Group=linx
ExecStart=/usr/bin/linx-server

[Install]
WantedBy=multi-user.target

217
meta.go
Normal file
217
meta.go
Normal file
|
@ -0,0 +1,217 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"archive/tar"
|
||||
"archive/zip"
|
||||
"bytes"
|
||||
"compress/bzip2"
|
||||
"compress/gzip"
|
||||
"crypto/sha256"
|
||||
"encoding/hex"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"sort"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"bitbucket.org/taruti/mimemagic"
|
||||
"github.com/dchest/uniuri"
|
||||
)
|
||||
|
||||
type MetadataJSON struct {
|
||||
DeleteKey string `json:"delete_key"`
|
||||
Sha256sum string `json:"sha256sum"`
|
||||
Mimetype string `json:"mimetype"`
|
||||
Size int64 `json:"size"`
|
||||
Expiry int64 `json:"expiry"`
|
||||
ArchiveFiles []string `json:"archive_files,omitempty"`
|
||||
ShortURL string `json:"short_url"`
|
||||
}
|
||||
|
||||
type Metadata struct {
|
||||
DeleteKey string
|
||||
Sha256sum string
|
||||
Mimetype string
|
||||
Size int64
|
||||
Expiry time.Time
|
||||
ArchiveFiles []string
|
||||
ShortURL string
|
||||
}
|
||||
|
||||
var NotFoundErr = errors.New("File not found.")
|
||||
var BadMetadata = errors.New("Corrupted metadata.")
|
||||
|
||||
func generateMetadata(fName string, exp time.Time, delKey string) (m Metadata, err error) {
|
||||
file, err := fileBackend.Open(fName)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
m.Size, err = fileBackend.Size(fName)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
m.Expiry = exp
|
||||
|
||||
if delKey == "" {
|
||||
m.DeleteKey = uniuri.NewLen(30)
|
||||
} else {
|
||||
m.DeleteKey = delKey
|
||||
}
|
||||
|
||||
// Get first 512 bytes for mimetype detection
|
||||
header := make([]byte, 512)
|
||||
file.Read(header)
|
||||
|
||||
m.Mimetype = mimemagic.Match("", header)
|
||||
|
||||
if m.Mimetype == "" {
|
||||
// Check if the file seems anything like text
|
||||
if printable(header) {
|
||||
m.Mimetype = "text/plain"
|
||||
} else {
|
||||
m.Mimetype = "application/octet-stream"
|
||||
}
|
||||
}
|
||||
|
||||
// Compute the sha256sum
|
||||
hasher := sha256.New()
|
||||
file.Seek(0, 0)
|
||||
_, err = io.Copy(hasher, file)
|
||||
if err == nil {
|
||||
m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))
|
||||
}
|
||||
file.Seek(0, 0)
|
||||
|
||||
// If archive, grab list of filenames
|
||||
if m.Mimetype == "application/x-tar" {
|
||||
tReadr := tar.NewReader(file)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(m.ArchiveFiles)
|
||||
} else if m.Mimetype == "application/x-gzip" {
|
||||
gzf, err := gzip.NewReader(file)
|
||||
if err == nil {
|
||||
tReadr := tar.NewReader(gzf)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(m.ArchiveFiles)
|
||||
}
|
||||
} else if m.Mimetype == "application/x-bzip" {
|
||||
bzf := bzip2.NewReader(file)
|
||||
tReadr := tar.NewReader(bzf)
|
||||
for {
|
||||
hdr, err := tReadr.Next()
|
||||
if err == io.EOF || err != nil {
|
||||
break
|
||||
}
|
||||
if hdr.Typeflag == tar.TypeDir || hdr.Typeflag == tar.TypeReg {
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, hdr.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(m.ArchiveFiles)
|
||||
} else if m.Mimetype == "application/zip" {
|
||||
zf, err := zip.NewReader(file, m.Size)
|
||||
if err == nil {
|
||||
for _, f := range zf.File {
|
||||
m.ArchiveFiles = append(m.ArchiveFiles, f.Name)
|
||||
}
|
||||
}
|
||||
sort.Strings(m.ArchiveFiles)
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func metadataWrite(filename string, metadata *Metadata) error {
|
||||
mjson := MetadataJSON{}
|
||||
mjson.DeleteKey = metadata.DeleteKey
|
||||
mjson.Mimetype = metadata.Mimetype
|
||||
mjson.ArchiveFiles = metadata.ArchiveFiles
|
||||
mjson.Sha256sum = metadata.Sha256sum
|
||||
mjson.Expiry = metadata.Expiry.Unix()
|
||||
mjson.Size = metadata.Size
|
||||
mjson.ShortURL = metadata.ShortURL
|
||||
|
||||
byt, err := json.Marshal(mjson)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if _, err := metaBackend.Put(filename, bytes.NewBuffer(byt)); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func metadataRead(filename string) (metadata Metadata, err error) {
|
||||
b, err := metaBackend.Get(filename)
|
||||
if err != nil {
|
||||
// Metadata does not exist, generate one
|
||||
newMData, err := generateMetadata(filename, neverExpire, "")
|
||||
if err != nil {
|
||||
return metadata, err
|
||||
}
|
||||
metadataWrite(filename, &newMData)
|
||||
|
||||
b, err = metaBackend.Get(filename)
|
||||
if err != nil {
|
||||
return metadata, BadMetadata
|
||||
}
|
||||
}
|
||||
|
||||
mjson := MetadataJSON{}
|
||||
|
||||
err = json.Unmarshal(b, &mjson)
|
||||
if err != nil {
|
||||
return metadata, BadMetadata
|
||||
}
|
||||
|
||||
metadata.DeleteKey = mjson.DeleteKey
|
||||
metadata.Mimetype = mjson.Mimetype
|
||||
metadata.ArchiveFiles = mjson.ArchiveFiles
|
||||
metadata.Sha256sum = mjson.Sha256sum
|
||||
metadata.Expiry = time.Unix(mjson.Expiry, 0)
|
||||
metadata.Size = mjson.Size
|
||||
metadata.ShortURL = mjson.ShortURL
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
func printable(data []byte) bool {
|
||||
for i, b := range data {
|
||||
r := rune(b)
|
||||
|
||||
// A null terminator that's not at the beginning of the file
|
||||
if r == 0 && i == 0 {
|
||||
return false
|
||||
} else if r == 0 && i < 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
if r > unicode.MaxASCII {
|
||||
return false
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
61
pages.go
61
pages.go
|
@ -21,9 +21,8 @@ const (
|
|||
|
||||
func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
err := renderTemplate(Templates["index.html"], pongo2.Context{
|
||||
"maxsize": Config.maxSize,
|
||||
"expirylist": listExpirationTimes(),
|
||||
"forcerandom": Config.forceRandomFilename,
|
||||
"maxsize": Config.maxSize,
|
||||
"expirylist": listExpirationTimes(),
|
||||
}, r, w)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
|
@ -32,8 +31,7 @@ func indexHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
err := renderTemplate(Templates["paste.html"], pongo2.Context{
|
||||
"expirylist": listExpirationTimes(),
|
||||
"forcerandom": Config.forceRandomFilename,
|
||||
"expirylist": listExpirationTimes(),
|
||||
}, r, w)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "")
|
||||
|
@ -42,29 +40,13 @@ func pasteHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
func apiDocHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
err := renderTemplate(Templates["API.html"], pongo2.Context{
|
||||
"siteurl": getSiteURL(r),
|
||||
"forcerandom": Config.forceRandomFilename,
|
||||
"siteurl": getSiteURL(r),
|
||||
}, r, w)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "")
|
||||
}
|
||||
}
|
||||
|
||||
func makeCustomPageHandler(fileName string) func(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
return func(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
err := renderTemplate(Templates["custom_page.html"], pongo2.Context{
|
||||
"siteurl": getSiteURL(r),
|
||||
"forcerandom": Config.forceRandomFilename,
|
||||
"contents": customPages[fileName],
|
||||
"filename": fileName,
|
||||
"pagename": customPagesNames[fileName],
|
||||
}, r, w)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func notFoundHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(404)
|
||||
err := renderTemplate(Templates["404.html"], pongo2.Context{}, r, w)
|
||||
|
@ -82,10 +64,12 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
|
|||
w.WriteHeader(500)
|
||||
renderTemplate(Templates["oops.html"], pongo2.Context{"msg": msg}, r, w)
|
||||
return
|
||||
|
||||
} else if rt == RespPLAIN {
|
||||
w.WriteHeader(500)
|
||||
fmt.Fprintf(w, "%s", msg)
|
||||
return
|
||||
|
||||
} else if rt == RespJSON {
|
||||
js, _ := json.Marshal(map[string]string{
|
||||
"error": msg,
|
||||
|
@ -95,6 +79,7 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
|
|||
w.WriteHeader(500)
|
||||
w.Write(js)
|
||||
return
|
||||
|
||||
} else if rt == RespAUTO {
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
oopsHandler(c, w, r, RespJSON, msg)
|
||||
|
@ -104,33 +89,11 @@ func oopsHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, m
|
|||
}
|
||||
}
|
||||
|
||||
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request, rt RespType, msg string) {
|
||||
if rt == RespHTML {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
err := renderTemplate(Templates["400.html"], pongo2.Context{"msg": msg}, r, w)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
return
|
||||
} else if rt == RespPLAIN {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
fmt.Fprintf(w, "%s", msg)
|
||||
return
|
||||
} else if rt == RespJSON {
|
||||
js, _ := json.Marshal(map[string]string{
|
||||
"error": msg,
|
||||
})
|
||||
|
||||
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
w.Write(js)
|
||||
return
|
||||
} else if rt == RespAUTO {
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
badRequestHandler(c, w, r, RespJSON, msg)
|
||||
} else {
|
||||
badRequestHandler(c, w, r, RespHTML, msg)
|
||||
}
|
||||
func badRequestHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
err := renderTemplate(Templates["400.html"], pongo2.Context{}, r, w)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
128
server.go
128
server.go
|
@ -8,19 +8,14 @@ import (
|
|||
"net/http/fcgi"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/signal"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
rice "github.com/GeertJohan/go.rice"
|
||||
"github.com/andreimarcu/linx-server/auth/apikeys"
|
||||
"github.com/GeertJohan/go.rice"
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/backends/localfs"
|
||||
"github.com/andreimarcu/linx-server/backends/s3"
|
||||
"github.com/andreimarcu/linx-server/cleanup"
|
||||
"github.com/flosch/pongo2"
|
||||
"github.com/vharitonsky/iniflags"
|
||||
"github.com/zenazn/goji/graceful"
|
||||
|
@ -46,13 +41,10 @@ var Config struct {
|
|||
siteName string
|
||||
siteURL string
|
||||
sitePath string
|
||||
selifPath string
|
||||
certFile string
|
||||
keyFile string
|
||||
contentSecurityPolicy string
|
||||
fileContentSecurityPolicy string
|
||||
referrerPolicy string
|
||||
fileReferrerPolicy string
|
||||
xFrameOptions string
|
||||
maxSize int64
|
||||
maxExpiry uint64
|
||||
|
@ -61,19 +53,10 @@ var Config struct {
|
|||
allowHotlink bool
|
||||
fastcgi bool
|
||||
remoteUploads bool
|
||||
basicAuth bool
|
||||
authFile string
|
||||
remoteAuthFile string
|
||||
addHeaders headerList
|
||||
noDirectAgents bool
|
||||
s3Endpoint string
|
||||
s3Region string
|
||||
s3Bucket string
|
||||
s3ForcePathStyle bool
|
||||
forceRandomFilename bool
|
||||
accessKeyCookieExpiry uint64
|
||||
customPagesDir string
|
||||
cleanupEveryMinutes uint64
|
||||
googleShorterAPIKey string
|
||||
}
|
||||
|
||||
var Templates = make(map[string]*pongo2.Template)
|
||||
|
@ -82,10 +65,8 @@ var staticBox *rice.Box
|
|||
var timeStarted time.Time
|
||||
var timeStartedStr string
|
||||
var remoteAuthKeys []string
|
||||
var metaStorageBackend backends.MetaStorageBackend
|
||||
var storageBackend backends.StorageBackend
|
||||
var customPages = make(map[string]string)
|
||||
var customPagesNames = make(map[string]string)
|
||||
var metaBackend backends.StorageBackend
|
||||
var fileBackend backends.StorageBackend
|
||||
|
||||
func setup() *web.Mux {
|
||||
mux := web.New()
|
||||
|
@ -104,19 +85,15 @@ func setup() *web.Mux {
|
|||
mux.Use(middleware.Recoverer)
|
||||
mux.Use(middleware.AutomaticOptions)
|
||||
mux.Use(ContentSecurityPolicy(CSPOptions{
|
||||
policy: Config.contentSecurityPolicy,
|
||||
referrerPolicy: Config.referrerPolicy,
|
||||
frame: Config.xFrameOptions,
|
||||
policy: Config.contentSecurityPolicy,
|
||||
frame: Config.xFrameOptions,
|
||||
}))
|
||||
mux.Use(AddHeaders(Config.addHeaders))
|
||||
|
||||
if Config.authFile != "" {
|
||||
mux.Use(apikeys.NewApiKeysMiddleware(apikeys.AuthOptions{
|
||||
mux.Use(UploadAuth(AuthOptions{
|
||||
AuthFile: Config.authFile,
|
||||
UnauthMethods: []string{"GET", "HEAD", "OPTIONS", "TRACE"},
|
||||
BasicAuth: Config.basicAuth,
|
||||
SiteName: Config.siteName,
|
||||
SitePath: Config.sitePath,
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -147,20 +124,8 @@ func setup() *web.Mux {
|
|||
Config.sitePath = "/"
|
||||
}
|
||||
|
||||
Config.selifPath = strings.TrimLeft(Config.selifPath, "/")
|
||||
if lastChar := Config.selifPath[len(Config.selifPath)-1:]; lastChar != "/" {
|
||||
Config.selifPath = Config.selifPath + "/"
|
||||
}
|
||||
|
||||
if Config.s3Bucket != "" {
|
||||
storageBackend = s3.NewS3Backend(Config.s3Bucket, Config.s3Region, Config.s3Endpoint, Config.s3ForcePathStyle)
|
||||
} else {
|
||||
storageBackend = localfs.NewLocalfsBackend(Config.metaDir, Config.filesDir)
|
||||
if Config.cleanupEveryMinutes > 0 {
|
||||
go cleanup.PeriodicCleanup(time.Duration(Config.cleanupEveryMinutes)*time.Minute, Config.filesDir, Config.metaDir, Config.noLogs)
|
||||
}
|
||||
|
||||
}
|
||||
metaBackend = localfs.NewLocalfsBackend(Config.metaDir)
|
||||
fileBackend = localfs.NewLocalfsBackend(Config.filesDir)
|
||||
|
||||
// Template setup
|
||||
p2l, err := NewPongo2TemplatesLoader()
|
||||
|
@ -179,11 +144,12 @@ func setup() *web.Mux {
|
|||
|
||||
// Routing setup
|
||||
nameRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)$`)
|
||||
selifRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `(?P<name>[a-z0-9-\.]+)$`)
|
||||
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `$`)
|
||||
selifRe := regexp.MustCompile("^" + Config.sitePath + `selif/(?P<name>[a-z0-9-\.]+)$`)
|
||||
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + `selif/$`)
|
||||
torrentRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/torrent$`)
|
||||
shortRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/short$`)
|
||||
|
||||
if Config.authFile == "" || Config.basicAuth {
|
||||
if Config.authFile == "" {
|
||||
mux.Get(Config.sitePath, indexHandler)
|
||||
mux.Get(Config.sitePath+"paste/", pasteHandler)
|
||||
} else {
|
||||
|
@ -200,7 +166,7 @@ func setup() *web.Mux {
|
|||
mux.Get(Config.sitePath+"upload/", uploadRemote)
|
||||
|
||||
if Config.remoteAuthFile != "" {
|
||||
remoteAuthKeys = apikeys.ReadAuthKeys(Config.remoteAuthFile)
|
||||
remoteAuthKeys = readAuthKeys(Config.remoteAuthFile)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -215,18 +181,13 @@ func setup() *web.Mux {
|
|||
mux.Get(Config.sitePath+"static/*", staticHandler)
|
||||
mux.Get(Config.sitePath+"favicon.ico", staticHandler)
|
||||
mux.Get(Config.sitePath+"robots.txt", staticHandler)
|
||||
mux.Get(nameRe, fileAccessHandler)
|
||||
mux.Post(nameRe, fileAccessHandler)
|
||||
mux.Get(nameRe, fileDisplayHandler)
|
||||
mux.Get(selifRe, fileServeHandler)
|
||||
mux.Get(selifIndexRe, unauthorizedHandler)
|
||||
mux.Get(torrentRe, fileTorrentHandler)
|
||||
|
||||
if Config.customPagesDir != "" {
|
||||
initializeCustomPages(Config.customPagesDir)
|
||||
for fileName := range customPagesNames {
|
||||
mux.Get(Config.sitePath+fileName, makeCustomPageHandler(fileName))
|
||||
mux.Get(Config.sitePath+fileName+"/", makeCustomPageHandler(fileName))
|
||||
}
|
||||
if Config.googleShorterAPIKey != "" {
|
||||
mux.Get(shortRe, shortURLHandler)
|
||||
}
|
||||
|
||||
mux.NotFound(notFoundHandler)
|
||||
|
@ -241,8 +202,6 @@ func main() {
|
|||
"path to files directory")
|
||||
flag.StringVar(&Config.metaDir, "metapath", "meta/",
|
||||
"path to metadata directory")
|
||||
flag.BoolVar(&Config.basicAuth, "basicauth", false,
|
||||
"allow logging by basic auth password")
|
||||
flag.BoolVar(&Config.noLogs, "nologs", false,
|
||||
"remove stdout output for each request")
|
||||
flag.BoolVar(&Config.allowHotlink, "allowhotlink", false,
|
||||
|
@ -251,8 +210,6 @@ func main() {
|
|||
"name of the site")
|
||||
flag.StringVar(&Config.siteURL, "siteurl", "",
|
||||
"site base url (including trailing slash)")
|
||||
flag.StringVar(&Config.selifPath, "selifpath", "selif",
|
||||
"path relative to site base url where files are accessed directly")
|
||||
flag.Int64Var(&Config.maxSize, "maxsize", 4*1024*1024*1024,
|
||||
"maximum upload file size in bytes (default 4GB)")
|
||||
flag.Uint64Var(&Config.maxExpiry, "maxexpiry", 0,
|
||||
|
@ -272,65 +229,24 @@ func main() {
|
|||
flag.StringVar(&Config.remoteAuthFile, "remoteauthfile", "",
|
||||
"path to a file containing newline-separated scrypted auth keys for remote uploads")
|
||||
flag.StringVar(&Config.contentSecurityPolicy, "contentsecuritypolicy",
|
||||
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
|
||||
"default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
|
||||
"value of default Content-Security-Policy header")
|
||||
flag.StringVar(&Config.fileContentSecurityPolicy, "filecontentsecuritypolicy",
|
||||
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';",
|
||||
"default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self'; referrer origin;",
|
||||
"value of Content-Security-Policy header for file access")
|
||||
flag.StringVar(&Config.referrerPolicy, "referrerpolicy",
|
||||
"same-origin",
|
||||
"value of default Referrer-Policy header")
|
||||
flag.StringVar(&Config.fileReferrerPolicy, "filereferrerpolicy",
|
||||
"same-origin",
|
||||
"value of Referrer-Policy header for file access")
|
||||
flag.StringVar(&Config.xFrameOptions, "xframeoptions", "SAMEORIGIN",
|
||||
"value of X-Frame-Options header")
|
||||
flag.Var(&Config.addHeaders, "addheader",
|
||||
"Add an arbitrary header to the response. This option can be used multiple times.")
|
||||
flag.BoolVar(&Config.noDirectAgents, "nodirectagents", false,
|
||||
"disable serving files directly for wget/curl user agents")
|
||||
flag.StringVar(&Config.s3Endpoint, "s3-endpoint", "",
|
||||
"S3 endpoint")
|
||||
flag.StringVar(&Config.s3Region, "s3-region", "",
|
||||
"S3 region")
|
||||
flag.StringVar(&Config.s3Bucket, "s3-bucket", "",
|
||||
"S3 bucket to use for files and metadata")
|
||||
flag.BoolVar(&Config.s3ForcePathStyle, "s3-force-path-style", false,
|
||||
"Force path-style addressing for S3 (e.g. https://s3.amazonaws.com/linx/example.txt)")
|
||||
flag.BoolVar(&Config.forceRandomFilename, "force-random-filename", false,
|
||||
"Force all uploads to use a random filename")
|
||||
flag.Uint64Var(&Config.accessKeyCookieExpiry, "access-cookie-expiry", 0, "Expiration time for access key cookies in seconds (set 0 to use session cookies)")
|
||||
flag.StringVar(&Config.customPagesDir, "custompagespath", "",
|
||||
"path to directory containing .md files to render as custom pages")
|
||||
flag.Uint64Var(&Config.cleanupEveryMinutes, "cleanup-every-minutes", 0,
|
||||
"How often to clean up expired files in minutes (default is 0, which means files will be cleaned up as they are accessed)")
|
||||
flag.StringVar(&Config.googleShorterAPIKey, "googleapikey", "",
|
||||
"API Key for Google's URL Shortener.")
|
||||
|
||||
iniflags.Parse()
|
||||
|
||||
mux := setup()
|
||||
|
||||
if Config.fastcgi {
|
||||
var listener net.Listener
|
||||
var err error
|
||||
if Config.bind[0] == '/' {
|
||||
// UNIX path
|
||||
listener, err = net.ListenUnix("unix", &net.UnixAddr{Name: Config.bind, Net: "unix"})
|
||||
cleanup := func() {
|
||||
log.Print("Removing FastCGI socket")
|
||||
os.Remove(Config.bind)
|
||||
}
|
||||
defer cleanup()
|
||||
sigs := make(chan os.Signal, 1)
|
||||
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
||||
go func() {
|
||||
sig := <-sigs
|
||||
log.Print("Signal: ", sig)
|
||||
cleanup()
|
||||
os.Exit(0)
|
||||
}()
|
||||
} else {
|
||||
listener, err = net.Listen("tcp", Config.bind)
|
||||
}
|
||||
listener, err := net.Listen("tcp", Config.bind)
|
||||
if err != nil {
|
||||
log.Fatal("Could not bind: ", err)
|
||||
}
|
||||
|
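The hunk above binds the FastCGI listener to either a Unix socket (removing the socket file on SIGINT/SIGTERM) or a TCP address. Below is a minimal, hedged Go sketch of that same pattern in isolation: the bind value and the trivial handler are placeholders, and the fcgi.Serve call at the end is only the standard-library way to serve such a listener, not necessarily how linx-server wires in its mux.

package main

import (
	"log"
	"net"
	"net/http"
	"net/http/fcgi"
	"os"
	"os/signal"
	"syscall"
)

func main() {
	bind := "/tmp/linx.sock" // placeholder bind address (linx-server takes this from its config)

	var listener net.Listener
	var err error

	if bind[0] == '/' {
		// Unix socket: remove the socket file again on SIGINT/SIGTERM.
		listener, err = net.ListenUnix("unix", &net.UnixAddr{Name: bind, Net: "unix"})
		cleanup := func() {
			log.Print("Removing FastCGI socket")
			os.Remove(bind)
		}
		defer cleanup()

		sigs := make(chan os.Signal, 1)
		signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
		go func() {
			sig := <-sigs
			log.Print("Signal: ", sig)
			cleanup()
			os.Exit(0)
		}()
	} else {
		listener, err = net.Listen("tcp", bind)
	}
	if err != nil {
		log.Fatal("Could not bind: ", err)
	}

	// Stand-in handler; linx-server would serve its own mux here.
	handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok"))
	})
	if err := fcgi.Serve(listener, handler); err != nil {
		log.Fatal(err)
	}
}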
|
222
server_test.go
|
@ -173,7 +173,7 @@ func TestFileNotFound(t *testing.T) {
|
|||
|
||||
filename := generateBarename()
|
||||
|
||||
req, err := http.NewRequest("GET", "/"+Config.selifPath+filename, nil)
|
||||
req, err := http.NewRequest("GET", "/selif/"+filename, nil)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
@ -450,9 +450,7 @@ func TestPostJSONUploadMaxExpiry(t *testing.T) {
|
|||
mux := setup()
|
||||
Config.maxExpiry = 300
|
||||
|
||||
// include 0 to test edge case
|
||||
// https://github.com/andreimarcu/linx-server/issues/111
|
||||
testExpiries := []string{"86400", "-150", "0"}
|
||||
testExpiries := []string{"86400", "-150"}
|
||||
for _, expiry := range testExpiries {
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
|
@ -486,6 +484,7 @@ func TestPostJSONUploadMaxExpiry(t *testing.T) {
|
|||
var myjson RespOkJSON
|
||||
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
|
||||
if err != nil {
|
||||
fmt.Println(w.Body.String())
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
|
@ -642,45 +641,14 @@ func TestPostEmptyUpload(t *testing.T) {
|
|||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 400 {
|
||||
if w.Code != 500 {
|
||||
t.Log(w.Body.String())
|
||||
t.Fatalf("Status code is not 400, but %d", w.Code)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPostTooLargeUpload(t *testing.T) {
|
||||
mux := setup()
|
||||
oldMaxSize := Config.maxSize
|
||||
Config.maxSize = 2
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".txt"
|
||||
|
||||
var b bytes.Buffer
|
||||
mw := multipart.NewWriter(&b)
|
||||
fw, err := mw.CreateFormFile("file", filename)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
t.Fatalf("Status code is not 500, but %d", w.Code)
|
||||
}
|
||||
|
||||
fw.Write([]byte("test content"))
|
||||
mw.Close()
|
||||
|
||||
req, err := http.NewRequest("POST", "/upload/", &b)
|
||||
req.Header.Set("Content-Type", mw.FormDataContentType())
|
||||
req.Header.Set("Referer", Config.siteURL)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
if !strings.Contains(w.Body.String(), "Empty file") {
|
||||
t.Fatal("Response did not contain 'Empty file'")
|
||||
}
|
||||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 400 {
|
||||
t.Log(w.Body.String())
|
||||
t.Fatalf("Status code is not 400, but %d", w.Code)
|
||||
}
|
||||
|
||||
Config.maxSize = oldMaxSize
|
||||
}
|
||||
|
||||
func TestPostEmptyJSONUpload(t *testing.T) {
|
||||
|
@ -709,9 +677,9 @@ func TestPostEmptyJSONUpload(t *testing.T) {
|
|||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 400 {
|
||||
if w.Code != 500 {
|
||||
t.Log(w.Body.String())
|
||||
t.Fatalf("Status code is not 400, but %d", w.Code)
|
||||
t.Fatalf("Status code is not 500, but %d", w.Code)
|
||||
}
|
||||
|
||||
var myjson RespErrJSON
|
||||
|
@ -720,7 +688,7 @@ func TestPostEmptyJSONUpload(t *testing.T) {
|
|||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if myjson.Error != "Empty file" {
|
||||
if myjson.Error != "Could not upload file: Empty file" {
|
||||
t.Fatal("Json 'error' was not 'Empty file' but " + myjson.Error)
|
||||
}
|
||||
}
|
||||
|
@ -729,7 +697,7 @@ func TestPutUpload(t *testing.T) {
|
|||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
|
@ -747,7 +715,7 @@ func TestPutRandomizedUpload(t *testing.T) {
|
|||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
|
@ -763,32 +731,6 @@ func TestPutRandomizedUpload(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestPutForceRandomUpload(t *testing.T) {
|
||||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
oldFRF := Config.forceRandomFilename
|
||||
Config.forceRandomFilename = true
|
||||
filename := "randomizeme.file"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// while this should also work without this header, let's try to force
|
||||
// the randomized filename off to be sure
|
||||
req.Header.Set("Linx-Randomize", "no")
|
||||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Body.String() == Config.siteURL+filename {
|
||||
t.Fatal("Filename was not random")
|
||||
}
|
||||
|
||||
Config.forceRandomFilename = oldFRF
|
||||
}
|
||||
|
||||
func TestPutNoExtensionUpload(t *testing.T) {
|
||||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
@ -813,7 +755,7 @@ func TestPutEmptyUpload(t *testing.T) {
|
|||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader(""))
|
||||
if err != nil {
|
||||
|
@ -824,48 +766,18 @@ func TestPutEmptyUpload(t *testing.T) {
|
|||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 400 {
|
||||
t.Fatalf("Status code is not 400, but %d", w.Code)
|
||||
if !strings.Contains(w.Body.String(), "Empty file") {
|
||||
t.Fatal("Response doesn't contain'Empty file'")
|
||||
}
|
||||
}
|
||||
|
||||
func TestPutTooLargeUpload(t *testing.T) {
|
||||
mux := setup()
|
||||
oldMaxSize := Config.maxSize
|
||||
Config.maxSize = 2
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File too big"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
req.Header.Set("Linx-Randomize", "yes")
|
||||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 500 {
|
||||
t.Log(w.Body.String())
|
||||
t.Fatalf("Status code is not 500, but %d", w.Code)
|
||||
}
|
||||
|
||||
if !strings.Contains(w.Body.String(), "request body too large") {
|
||||
t.Fatal("Response did not contain 'request body too large'")
|
||||
}
|
||||
|
||||
Config.maxSize = oldMaxSize
|
||||
}
|
||||
|
||||
func TestPutJSONUpload(t *testing.T) {
|
||||
var myjson RespOkJSON
|
||||
|
||||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
|
@ -892,7 +804,7 @@ func TestPutRandomizedJSONUpload(t *testing.T) {
|
|||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
|
@ -920,7 +832,7 @@ func TestPutExpireJSONUpload(t *testing.T) {
|
|||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
filename := generateBarename() + ".file"
|
||||
filename := generateBarename() + ".ext"
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload/"+filename, strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
|
@ -1027,7 +939,7 @@ func TestPutAndOverwrite(t *testing.T) {
|
|||
|
||||
// Make sure it's the new file
|
||||
w = httptest.NewRecorder()
|
||||
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
|
||||
req, err = http.NewRequest("GET", "/selif/"+myjson.Filename, nil)
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code == 404 {
|
||||
|
@ -1039,55 +951,6 @@ func TestPutAndOverwrite(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestPutAndOverwriteForceRandom(t *testing.T) {
|
||||
var myjson RespOkJSON
|
||||
|
||||
mux := setup()
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
oldFRF := Config.forceRandomFilename
|
||||
Config.forceRandomFilename = true
|
||||
|
||||
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
req.Header.Set("Accept", "application/json")
|
||||
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Overwrite it
|
||||
w = httptest.NewRecorder()
|
||||
req, err = http.NewRequest("PUT", "/upload/"+myjson.Filename, strings.NewReader("New file content"))
|
||||
req.Header.Set("Linx-Delete-Key", myjson.Delete_Key)
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code != 200 {
|
||||
t.Fatal("Status code was not 200, but " + strconv.Itoa(w.Code))
|
||||
}
|
||||
|
||||
// Make sure it's the new file
|
||||
w = httptest.NewRecorder()
|
||||
req, err = http.NewRequest("GET", "/"+Config.selifPath+myjson.Filename, nil)
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
if w.Code == 404 {
|
||||
t.Fatal("Status code was 404")
|
||||
}
|
||||
|
||||
if w.Body.String() != "New file content" {
|
||||
t.Fatal("File did not contain 'New file content")
|
||||
}
|
||||
|
||||
Config.forceRandomFilename = oldFRF
|
||||
}
|
||||
|
||||
func TestPutAndSpecificDelete(t *testing.T) {
|
||||
var myjson RespOkJSON
|
||||
|
||||
|
@ -1256,50 +1119,3 @@ func TestShutdown(t *testing.T) {
|
|||
os.RemoveAll(Config.filesDir)
|
||||
os.RemoveAll(Config.metaDir)
|
||||
}
|
||||
|
||||
func TestPutAndGetCLI(t *testing.T) {
|
||||
var myjson RespOkJSON
|
||||
mux := setup()
|
||||
|
||||
// upload file
|
||||
w := httptest.NewRecorder()
|
||||
req, err := http.NewRequest("PUT", "/upload", strings.NewReader("File content"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
req.Header.Set("Accept", "application/json")
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
err = json.Unmarshal([]byte(w.Body.String()), &myjson)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// request file without wget user agent
|
||||
w = httptest.NewRecorder()
|
||||
req, err = http.NewRequest("GET", myjson.Url, nil)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
contentType := w.Header().Get("Content-Type")
|
||||
if strings.HasPrefix(contentType, "text/plain") {
|
||||
t.Fatalf("Didn't receive file display page but %s", contentType)
|
||||
}
|
||||
|
||||
// request file with wget user agent
|
||||
w = httptest.NewRecorder()
|
||||
req, err = http.NewRequest("GET", myjson.Url, nil)
|
||||
req.Header.Set("User-Agent", "wget")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
mux.ServeHTTP(w, req)
|
||||
|
||||
contentType = w.Header().Get("Content-Type")
|
||||
if !strings.HasPrefix(contentType, "text/plain") {
|
||||
t.Fatalf("Didn't receive file directly but %s", contentType)
|
||||
}
|
||||
|
||||
}
|
||||
|
|
89
shorturl.go
Normal file
|
@ -0,0 +1,89 @@
|
|||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"net/http"
|
||||
|
||||
"github.com/zenazn/goji/web"
|
||||
)
|
||||
|
||||
type shortenerRequest struct {
|
||||
LongURL string `json:"longUrl"`
|
||||
}
|
||||
|
||||
type shortenerResponse struct {
|
||||
Kind string `json:"kind"`
|
||||
ID string `json:"id"`
|
||||
LongURL string `json:"longUrl"`
|
||||
Error struct {
|
||||
Code int `json:"code"`
|
||||
Message string `json:"message"`
|
||||
} `json:"error"`
|
||||
}
|
||||
|
||||
func shortURLHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
fileName := c.URLParams["name"]
|
||||
|
||||
err := checkFile(fileName)
|
||||
if err == NotFoundErr {
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
metadata, err := metadataRead(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
|
||||
return
|
||||
}
|
||||
|
||||
if metadata.ShortURL == "" {
|
||||
url, err := shortenURL(getSiteURL(r) + fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespJSON, err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
metadata.ShortURL = url
|
||||
|
||||
err = metadataWrite(fileName, &metadata)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespJSON, "Corrupt metadata.")
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
js, _ := json.Marshal(map[string]string{
|
||||
"shortUrl": metadata.ShortURL,
|
||||
})
|
||||
w.Write(js)
|
||||
return
|
||||
}
|
||||
|
||||
func shortenURL(url string) (string, error) {
|
||||
apiURL := "https://www.googleapis.com/urlshortener/v1/url?key=" + Config.googleShorterAPIKey
|
||||
jsonStr, _ := json.Marshal(shortenerRequest{LongURL: url})
|
||||
|
||||
req, err := http.NewRequest("POST", apiURL, bytes.NewBuffer(jsonStr))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
|
||||
client := &http.Client{}
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
shortenerResponse := new(shortenerResponse)
|
||||
err = json.NewDecoder(resp.Body).Decode(shortenerResponse)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
if shortenerResponse.Error.Message != "" {
|
||||
return "", errors.New(shortenerResponse.Error.Message)
|
||||
}
|
||||
|
||||
return shortenerResponse.ID, nil
|
||||
}
|
|
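shorturl.go above serves the shortened URL from a ".../short" route as {"shortUrl": "..."}, which is the same endpoint static/js/shorturl.js requests with an Accept: application/json header. A hedged Go sketch of a client calling that route; the file URL is a placeholder and error handling is kept minimal.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
)

func main() {
	// Placeholder upload URL; "/short" is appended the same way the template's data-url does.
	fileURL := "https://example.com/myphoto.jpg"

	req, err := http.NewRequest("GET", fileURL+"/short", nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Accept", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// On success the handler writes {"shortUrl": "..."}.
	var body map[string]string
	if err := json.NewDecoder(resp.Body).Decode(&body); err != nil {
		log.Fatal(err)
	}
	fmt.Println(body["shortUrl"])
}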
@ -31,25 +31,17 @@
|
|||
border: 2px solid #FAFBFC;
|
||||
}
|
||||
|
||||
#dropzone {
|
||||
width: 400px;
|
||||
#dropzone { width: 400px;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
@media(max-width: 450px) {
|
||||
#dropzone {
|
||||
width: auto;
|
||||
}
|
||||
}
|
||||
|
||||
#uploads {
|
||||
margin-top: 20px;
|
||||
}
|
||||
|
||||
div.dz-default {
|
||||
border: 2px dashed #C9C9C9;
|
||||
border-radius: 5px;
|
||||
color: #C9C9C9;
|
||||
font: 14px "helvetica neue",helvetica,arial,sans-serif;
|
||||
background-color: #FAFBFC;
|
||||
|
|
|
@ -8,8 +8,7 @@
|
|||
font-size: 12px;
|
||||
line-height: 1.6;
|
||||
word-wrap: break-word;
|
||||
width: 80vw;
|
||||
max-width: 680px;
|
||||
width: 680px;
|
||||
padding: 10px;
|
||||
}
|
||||
|
||||
|
@ -44,10 +43,6 @@
|
|||
overflow: auto;
|
||||
}
|
||||
|
||||
.markdown-body li {
|
||||
list-style: unset;
|
||||
}
|
||||
|
||||
.markdown-body code,
|
||||
.markdown-body kbd,
|
||||
.markdown-body pre {
|
||||
|
|
|
@ -2,71 +2,102 @@
|
|||
|
||||
/* Tomorrow Comment */
|
||||
.hljs-comment,
|
||||
.hljs-quote {
|
||||
.hljs-title {
|
||||
color: #8e908c;
|
||||
}
|
||||
|
||||
/* Tomorrow Red */
|
||||
.hljs-variable,
|
||||
.hljs-template-variable,
|
||||
.hljs-attribute,
|
||||
.hljs-tag,
|
||||
.hljs-name,
|
||||
.hljs-selector-id,
|
||||
.hljs-selector-class,
|
||||
.hljs-regexp,
|
||||
.hljs-deletion {
|
||||
.ruby .hljs-constant,
|
||||
.xml .hljs-tag .hljs-title,
|
||||
.xml .hljs-pi,
|
||||
.xml .hljs-doctype,
|
||||
.html .hljs-doctype,
|
||||
.css .hljs-id,
|
||||
.css .hljs-class,
|
||||
.css .hljs-pseudo {
|
||||
color: #c82829;
|
||||
}
|
||||
|
||||
/* Tomorrow Orange */
|
||||
.hljs-number,
|
||||
.hljs-preprocessor,
|
||||
.hljs-pragma,
|
||||
.hljs-built_in,
|
||||
.hljs-builtin-name,
|
||||
.hljs-literal,
|
||||
.hljs-type,
|
||||
.hljs-params,
|
||||
.hljs-meta,
|
||||
.hljs-link {
|
||||
.hljs-constant {
|
||||
color: #f5871f;
|
||||
}
|
||||
|
||||
/* Tomorrow Yellow */
|
||||
.hljs-attribute {
|
||||
.ruby .hljs-class .hljs-title,
|
||||
.css .hljs-rules .hljs-attribute {
|
||||
color: #eab700;
|
||||
}
|
||||
|
||||
/* Tomorrow Green */
|
||||
.hljs-string,
|
||||
.hljs-symbol,
|
||||
.hljs-bullet,
|
||||
.hljs-addition {
|
||||
.hljs-value,
|
||||
.hljs-inheritance,
|
||||
.hljs-header,
|
||||
.ruby .hljs-symbol,
|
||||
.xml .hljs-cdata {
|
||||
color: #718c00;
|
||||
}
|
||||
|
||||
/* Tomorrow Aqua */
|
||||
.css .hljs-hexcolor {
|
||||
color: #3e999f;
|
||||
}
|
||||
|
||||
/* Tomorrow Blue */
|
||||
.hljs-title,
|
||||
.hljs-section {
|
||||
.hljs-function,
|
||||
.python .hljs-decorator,
|
||||
.python .hljs-title,
|
||||
.ruby .hljs-function .hljs-title,
|
||||
.ruby .hljs-title .hljs-keyword,
|
||||
.perl .hljs-sub,
|
||||
.javascript .hljs-title,
|
||||
.coffeescript .hljs-title {
|
||||
color: #4271ae;
|
||||
}
|
||||
|
||||
/* Tomorrow Purple */
|
||||
.hljs-keyword,
|
||||
.hljs-selector-tag {
|
||||
.javascript .hljs-function {
|
||||
color: #8959a8;
|
||||
}
|
||||
|
||||
.hljs {
|
||||
display: block;
|
||||
overflow-x: auto;
|
||||
background: white;
|
||||
color: #4d4d4c;
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
.hljs-emphasis {
|
||||
font-style: italic;
|
||||
.hljs-line-numbers {
|
||||
text-align: right;
|
||||
border-right: 1px solid #ccc;
|
||||
margin-right: 5px;
|
||||
color: #999;
|
||||
-webkit-touch-callout: none;
|
||||
-webkit-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.hljs-strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
.coffeescript .javascript,
|
||||
.javascript .xml,
|
||||
.tex .hljs-formula,
|
||||
.xml .javascript,
|
||||
.xml .vbscript,
|
||||
.xml .css,
|
||||
.xml .hljs-cdata {
|
||||
opacity: 0.5;
|
||||
}
|
|
@ -1,56 +1,56 @@
|
|||
body {
|
||||
background-color: #E8ECF0;
|
||||
color: #556A7F;
|
||||
background-color: #E8ECF0;
|
||||
color: #556A7F;
|
||||
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-size: 14px;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
#container_container {
|
||||
display: table;
|
||||
table-layout: fixed;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
display: table;
|
||||
table-layout: fixed;
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
|
||||
#container {
|
||||
display: table-cell;
|
||||
min-width: 200px;
|
||||
display: table-cell;
|
||||
min-width: 200px;
|
||||
}
|
||||
|
||||
#header a {
|
||||
text-decoration: none;
|
||||
color: #556A7F;
|
||||
text-decoration: none;
|
||||
color: #556A7F;
|
||||
}
|
||||
|
||||
#navigation {
|
||||
margin-top: 4px;
|
||||
margin-top: 4px;
|
||||
}
|
||||
|
||||
#navigation a {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #556A7F;
|
||||
color: #556A7F;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted #556A7F;
|
||||
color: #556A7F;
|
||||
}
|
||||
|
||||
#navigation a:hover {
|
||||
background-color: #C7D1EB;
|
||||
background-color: #C7D1EB;
|
||||
}
|
||||
|
||||
#main {
|
||||
background-color: white;
|
||||
background-color: white;
|
||||
|
||||
padding: 6px 5px 8px 5px;
|
||||
padding: 6px 5px 8px 5px;
|
||||
|
||||
-moz-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
box-shadow: 1px 1px 1px 1px #ccc;
|
||||
-moz-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
box-shadow: 1px 1px 1px 1px #ccc;
|
||||
|
||||
text-align: center;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
#main a {
|
||||
color: #556A7F;
|
||||
color: #556A7F;
|
||||
}
|
||||
|
||||
#normal-content {
|
||||
|
@ -62,29 +62,28 @@ body {
|
|||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.ninfo {
|
||||
margin-bottom: 5px;
|
||||
}
|
||||
|
||||
.dinfo {
|
||||
-moz-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
-webkit-box-shadow: 1px 1px 1px 1px #ccc;
|
||||
box-shadow: 1px 1px 1px 1px #ccc;
|
||||
margin-bottom: 15px;
|
||||
|
||||
}
|
||||
|
||||
#info {
|
||||
text-align: left;
|
||||
|
||||
background-color: white;
|
||||
padding: 5px;
|
||||
padding: 5px 5px 5px 5px;
|
||||
}
|
||||
|
||||
.info-flex {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
align-items: baseline;
|
||||
justify-content: space-between;
|
||||
}
|
||||
|
||||
.info-actions {
|
||||
margin-left: 15px;
|
||||
font-size: 13px;
|
||||
text-align: right;
|
||||
#info #filename,
|
||||
#editform #filename {
|
||||
width: 232px;
|
||||
}
|
||||
|
||||
#info #extension,
|
||||
|
@ -92,6 +91,15 @@ body {
|
|||
width: 40px;
|
||||
}
|
||||
|
||||
#info .float-left {
|
||||
margin-top: 2px;
|
||||
margin-right: 20px;
|
||||
}
|
||||
|
||||
#info .right {
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
#info a {
|
||||
text-decoration: none;
|
||||
color: #556A7F;
|
||||
|
@ -102,97 +110,88 @@ body {
|
|||
background-color: #E8ECF0;
|
||||
}
|
||||
|
||||
#info input[type=checkbox] {
|
||||
margin: 0;
|
||||
vertical-align: bottom;
|
||||
#info input[type=text] {
|
||||
border: 0;
|
||||
color: #556A7F;
|
||||
}
|
||||
|
||||
#footer {
|
||||
color: gray;
|
||||
text-align: right;
|
||||
margin-top: 30px;
|
||||
margin-bottom: 10px;
|
||||
font-size: 11px;
|
||||
color: gray;
|
||||
text-align: right;
|
||||
margin-top: 30px;
|
||||
margin-bottom: 10px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
#footer a {
|
||||
color: gray;
|
||||
text-decoration: none;
|
||||
color: gray;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
|
||||
.normal {
|
||||
text-align: left;
|
||||
font-size: 13px;
|
||||
text-align: left;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.normal a {
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted gray;
|
||||
text-decoration: none;
|
||||
border-bottom: 1px dotted gray;
|
||||
}
|
||||
|
||||
.normal a:hover {
|
||||
color: black;
|
||||
background-color: #E8ECF0;
|
||||
color: black;
|
||||
background-color: #E8ECF0;
|
||||
}
|
||||
|
||||
.normal ul {
|
||||
padding-left: 15px;
|
||||
padding-left: 15px;
|
||||
}
|
||||
|
||||
.normal li {
|
||||
margin-bottom: 3px;
|
||||
list-style: none;
|
||||
margin-bottom: 3px;
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.normal li a {
|
||||
font-weight: bold;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.fixed {
|
||||
width: 80vw;
|
||||
max-width: 800px;
|
||||
}
|
||||
|
||||
.paste {
|
||||
width: 70vw;
|
||||
max-width: 700px;
|
||||
width: 800px;
|
||||
}
|
||||
|
||||
.needs-border {
|
||||
border-top: 1px solid rgb(214, 214, 214);
|
||||
border-top: 1px solid rgb(214, 214, 214);
|
||||
}
|
||||
|
||||
.left {
|
||||
text-align: left;
|
||||
text-align: left;
|
||||
}
|
||||
|
||||
.float-left {
|
||||
float: left;
|
||||
}
|
||||
|
||||
.pad-left {
|
||||
padding-left: 10px;
|
||||
float: left;
|
||||
}
|
||||
|
||||
.pad-right {
|
||||
padding-right: 10px;
|
||||
padding-right: 10px;
|
||||
}
|
||||
|
||||
.text-right {
|
||||
text-align: right;
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
.center {
|
||||
text-align: center;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
|
||||
.float-right, .right {
|
||||
float: right;
|
||||
float: right;
|
||||
}
|
||||
|
||||
.clear {
|
||||
clear: both;
|
||||
clear: both;
|
||||
}
|
||||
|
||||
#upload_header {
|
||||
|
@ -246,42 +245,19 @@ body {
|
|||
}
|
||||
|
||||
#choices {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
flex-wrap: wrap;
|
||||
justify-content: space-between;
|
||||
width: 100%;
|
||||
margin-top: 5px;
|
||||
font-size: 13px;
|
||||
float: left;
|
||||
width: 100%;
|
||||
text-align: left;
|
||||
vertical-align: bottom;
|
||||
margin-top: 5px;
|
||||
font-size:13px;
|
||||
}
|
||||
|
||||
#expiry {
|
||||
float: right;
|
||||
padding-top: 1px;
|
||||
}
|
||||
|
||||
#randomize {
|
||||
vertical-align: bottom;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#access_key {
|
||||
min-width: 100%;
|
||||
line-height: 1.3em;
|
||||
}
|
||||
|
||||
#access_key input, span {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
#access_key_checkbox {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#access_key_input {
|
||||
padding: 0;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.oopscontent {
|
||||
width: 400px;
|
||||
}
|
||||
|
@ -291,35 +267,13 @@ body {
|
|||
border: 0;
|
||||
}
|
||||
|
||||
.error-404 img {
|
||||
max-width: 90vw;
|
||||
}
|
||||
|
||||
.padme {
|
||||
padding-left: 5px;
|
||||
padding-right: 5px;
|
||||
}
|
||||
|
||||
.editor {
|
||||
width: 100%;
|
||||
height: 450px;
|
||||
border: 1px solid #eaeaea;
|
||||
font-family: monospace;
|
||||
resize: none;
|
||||
overflow: auto;
|
||||
border-radius: 2px;
|
||||
padding: 2px;
|
||||
box-sizing: border-box;
|
||||
-webkit-box-sizing: border-box;
|
||||
-moz-box-sizing: border-box;
|
||||
}
|
||||
|
||||
|
||||
#info input[type=text] {
|
||||
border: 1px solid #eaeaea;
|
||||
color: #556A7F;
|
||||
padding: 2px 4px;
|
||||
font-family: Arial, Helvetica, sans-serif;
|
||||
width: 705px;
|
||||
height: 450px;
|
||||
border-color: #cccccc;
|
||||
font-family: monospace;
|
||||
resize: none;
|
||||
overflow: auto;
|
||||
}
|
||||
|
||||
.storygreen {
|
||||
|
@ -333,7 +287,7 @@ body {
|
|||
/* Content display {{{ */
|
||||
.display-audio,
|
||||
.display-file {
|
||||
width: 100%;
|
||||
width: 500px;
|
||||
}
|
||||
|
||||
.display-image {
|
||||
|
@ -348,7 +302,6 @@ body {
|
|||
|
||||
.display-video {
|
||||
width: 800px;
|
||||
max-height: 70vh;
|
||||
}
|
||||
|
||||
.scrollable {
|
||||
|
@ -362,17 +315,16 @@ body {
|
|||
#editform,
|
||||
#editform .editor {
|
||||
display: none;
|
||||
width: 100%
|
||||
}
|
||||
|
||||
#codeb {
|
||||
white-space: pre-wrap;
|
||||
}
|
||||
|
||||
#inplace-editor {
|
||||
#editor {
|
||||
display: none;
|
||||
width: 100%;
|
||||
width: 794px;
|
||||
height: 800px;
|
||||
font-size: 13px;
|
||||
}
|
||||
/* }}} */
|
||||
/* }}} */
|
Binary file not shown.
(Before: 19 KiB, After: 18 KiB)
116
static/js/bin.js
|
@ -1,58 +1,58 @@
|
|||
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
|
||||
|
||||
var navlist = document.getElementById("info").getElementsByClassName("info-actions")[0];
|
||||
|
||||
init();
|
||||
|
||||
function init() {
|
||||
var editA = document.createElement('a');
|
||||
|
||||
editA.setAttribute("href", "#");
|
||||
editA.addEventListener('click', function(ev) {
|
||||
edit(ev);
|
||||
return false;
|
||||
});
|
||||
editA.innerHTML = "edit";
|
||||
|
||||
var separator = document.createTextNode(" | ");
|
||||
navlist.insertBefore(editA, navlist.firstChild);
|
||||
navlist.insertBefore(separator, navlist.children[1]);
|
||||
|
||||
document.getElementById('save').addEventListener('click', paste);
|
||||
document.getElementById('wordwrap').addEventListener('click', wrap);
|
||||
}
|
||||
|
||||
function edit(ev) {
|
||||
ev.preventDefault();
|
||||
|
||||
navlist.remove();
|
||||
document.getElementById("filename").remove();
|
||||
document.getElementById("editform").style.display = "block";
|
||||
|
||||
var normalcontent = document.getElementById("normal-content");
|
||||
normalcontent.removeChild(document.getElementById("normal-code"));
|
||||
|
||||
var editordiv = document.getElementById("inplace-editor");
|
||||
editordiv.style.display = "block";
|
||||
editordiv.addEventListener('keydown', handleTab);
|
||||
}
|
||||
|
||||
function paste(ev) {
|
||||
var editordiv = document.getElementById("inplace-editor");
|
||||
document.getElementById("newcontent").value = editordiv.value;
|
||||
document.forms["reply"].submit();
|
||||
}
|
||||
|
||||
function wrap(ev) {
|
||||
if (document.getElementById("wordwrap").checked) {
|
||||
document.getElementById("codeb").style.wordWrap = "break-word";
|
||||
document.getElementById("codeb").style.whiteSpace = "pre-wrap";
|
||||
}
|
||||
|
||||
else {
|
||||
document.getElementById("codeb").style.wordWrap = "normal";
|
||||
document.getElementById("codeb").style.whiteSpace = "pre";
|
||||
}
|
||||
}
|
||||
|
||||
// @license-end
|
||||
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
|
||||
|
||||
var navlist = document.getElementById("info").getElementsByClassName("right")[0];
|
||||
|
||||
init();
|
||||
|
||||
function init() {
|
||||
var editA = document.createElement('a');
|
||||
|
||||
editA.setAttribute("href", "#");
|
||||
editA.addEventListener('click', function(ev) {
|
||||
edit(ev);
|
||||
return false;
|
||||
});
|
||||
editA.innerHTML = "edit";
|
||||
|
||||
var separator = document.createTextNode(" | ");
|
||||
navlist.insertBefore(editA, navlist.firstChild);
|
||||
navlist.insertBefore(separator, navlist.children[1]);
|
||||
|
||||
document.getElementById('save').addEventListener('click', paste);
|
||||
document.getElementById('wordwrap').addEventListener('click', wrap);
|
||||
}
|
||||
|
||||
function edit(ev) {
|
||||
ev.preventDefault();
|
||||
|
||||
navlist.remove();
|
||||
document.getElementById("filename").remove();
|
||||
document.getElementById("editform").style.display = "block";
|
||||
|
||||
var normalcontent = document.getElementById("normal-content");
|
||||
normalcontent.removeChild(document.getElementById("normal-code"));
|
||||
|
||||
var editordiv = document.getElementById("editor");
|
||||
editordiv.style.display = "block";
|
||||
editordiv.addEventListener('keydown', handleTab);
|
||||
}
|
||||
|
||||
function paste(ev) {
|
||||
var editordiv = document.getElementById("editor");
|
||||
document.getElementById("newcontent").value = editordiv.value;
|
||||
document.forms["reply"].submit();
|
||||
}
|
||||
|
||||
function wrap(ev) {
|
||||
if (document.getElementById("wordwrap").checked) {
|
||||
document.getElementById("codeb").style.wordWrap = "break-word";
|
||||
document.getElementById("codeb").style.whiteSpace = "pre-wrap";
|
||||
}
|
||||
|
||||
else {
|
||||
document.getElementById("codeb").style.wordWrap = "normal";
|
||||
document.getElementById("codeb").style.whiteSpace = "pre";
|
||||
}
|
||||
}
|
||||
|
||||
// @license-end
|
||||
|
|
File diff suppressed because one or more lines are too long
39
static/js/shorturl.js
Normal file
|
@ -0,0 +1,39 @@
|
|||
document.getElementById('shorturl').addEventListener('click', function (e) {
|
||||
e.preventDefault();
|
||||
|
||||
if (e.target.href !== "") return;
|
||||
|
||||
xhr = new XMLHttpRequest();
|
||||
xhr.open("GET", e.target.dataset.url, true);
|
||||
xhr.setRequestHeader('Accept', 'application/json');
|
||||
xhr.onreadystatechange = function () {
|
||||
if (xhr.readyState === 4) {
|
||||
var resp = JSON.parse(xhr.responseText);
|
||||
|
||||
if (xhr.status === 200 && resp.error == null) {
|
||||
e.target.innerText = resp.shortUrl;
|
||||
e.target.href = resp.shortUrl;
|
||||
e.target.setAttribute('aria-label', 'Click to copy into clipboard')
|
||||
} else {
|
||||
e.target.setAttribute('aria-label', resp.error)
|
||||
}
|
||||
}
|
||||
};
|
||||
xhr.send();
|
||||
});
|
||||
|
||||
var clipboard = new Clipboard("#shorturl", {
|
||||
text: function (trigger) {
|
||||
if (trigger.href == null) return;
|
||||
|
||||
return trigger.href;
|
||||
}
|
||||
});
|
||||
|
||||
clipboard.on('success', function (e) {
|
||||
e.trigger.setAttribute('aria-label', 'Successfully copied')
|
||||
});
|
||||
|
||||
clipboard.on('error', function (e) {
|
||||
e.trigger.setAttribute('aria-label', 'Your browser does not support copying to clipboard')
|
||||
});
|
|
@ -1,71 +1,51 @@
|
|||
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
|
||||
|
||||
Dropzone.options.dropzone = {
|
||||
init: function () {
|
||||
var dzone = document.getElementById("dzone");
|
||||
dzone.style.display = "block";
|
||||
},
|
||||
addedfile: function (file) {
|
||||
if (!this.options.autoProcessQueue) {
|
||||
var dropzone = this;
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onload = function () {
|
||||
if (xhr.readyState !== XMLHttpRequest.DONE) {
|
||||
return;
|
||||
}
|
||||
if (xhr.status < 400) {
|
||||
dropzone.processQueue()
|
||||
dropzone.options.autoProcessQueue = true;
|
||||
} else {
|
||||
dropzone.cancelUpload(file)
|
||||
}
|
||||
};
|
||||
xhr.open("HEAD", "auth", true);
|
||||
xhr.send()
|
||||
}
|
||||
var upload = document.createElement("div");
|
||||
upload.className = "upload";
|
||||
init: function() {
|
||||
var dzone = document.getElementById("dzone");
|
||||
dzone.style.display = "block";
|
||||
},
|
||||
addedfile: function(file) {
|
||||
var upload = document.createElement("div");
|
||||
upload.className = "upload";
|
||||
|
||||
var fileLabel = document.createElement("span");
|
||||
fileLabel.innerHTML = file.name;
|
||||
file.fileLabel = fileLabel;
|
||||
upload.appendChild(fileLabel);
|
||||
var fileLabel = document.createElement("span");
|
||||
fileLabel.innerHTML = file.name;
|
||||
file.fileLabel = fileLabel;
|
||||
upload.appendChild(fileLabel);
|
||||
|
||||
var fileActions = document.createElement("div");
|
||||
fileActions.className = "right";
|
||||
file.fileActions = fileActions;
|
||||
upload.appendChild(fileActions);
|
||||
var fileActions = document.createElement("div");
|
||||
fileActions.className = "right";
|
||||
file.fileActions = fileActions;
|
||||
upload.appendChild(fileActions);
|
||||
|
||||
var cancelAction = document.createElement("span");
|
||||
cancelAction.className = "cancel";
|
||||
cancelAction.innerHTML = "Cancel";
|
||||
cancelAction.addEventListener('click', function (ev) {
|
||||
this.removeFile(file);
|
||||
}.bind(this));
|
||||
file.cancelActionElement = cancelAction;
|
||||
fileActions.appendChild(cancelAction);
|
||||
var cancelAction = document.createElement("span");
|
||||
cancelAction.className = "cancel";
|
||||
cancelAction.innerHTML = "Cancel";
|
||||
cancelAction.addEventListener('click', function(ev) {
|
||||
this.removeFile(file);
|
||||
}.bind(this));
|
||||
file.cancelActionElement = cancelAction;
|
||||
fileActions.appendChild(cancelAction);
|
||||
|
||||
var progress = document.createElement("span");
|
||||
file.progressElement = progress;
|
||||
fileActions.appendChild(progress);
|
||||
var progress = document.createElement("span");
|
||||
file.progressElement = progress;
|
||||
fileActions.appendChild(progress);
|
||||
|
||||
file.uploadElement = upload;
|
||||
file.uploadElement = upload;
|
||||
|
||||
document.getElementById("uploads").appendChild(upload);
|
||||
},
|
||||
uploadprogress: function (file, p, bytesSent) {
|
||||
p = parseInt(p);
|
||||
file.progressElement.innerHTML = p + "%";
|
||||
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
|
||||
},
|
||||
sending: function (file, xhr, formData) {
|
||||
var randomize = document.getElementById("randomize");
|
||||
if (randomize != null) {
|
||||
formData.append("randomize", randomize.checked);
|
||||
}
|
||||
formData.append("expires", document.getElementById("expires").value);
|
||||
},
|
||||
success: function (file, resp) {
|
||||
document.getElementById("uploads").appendChild(upload);
|
||||
},
|
||||
uploadprogress: function(file, p, bytesSent) {
|
||||
p = parseInt(p);
|
||||
file.progressElement.innerHTML = p + "%";
|
||||
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
|
||||
},
|
||||
sending: function(file, xhr, formData) {
|
||||
formData.append("randomize", document.getElementById("randomize").checked);
|
||||
formData.append("expires", document.getElementById("expires").selectedOptions[0].value);
|
||||
},
|
||||
success: function(file, resp) {
|
||||
file.fileActions.removeChild(file.progressElement);
|
||||
|
||||
var fileLabelLink = document.createElement("a");
|
||||
|
@ -79,76 +59,51 @@ Dropzone.options.dropzone = {
|
|||
var deleteAction = document.createElement("span");
|
||||
deleteAction.innerHTML = "Delete";
|
||||
deleteAction.className = "cancel";
|
||||
deleteAction.addEventListener('click', function (ev) {
|
||||
xhr = new XMLHttpRequest();
|
||||
xhr.open("DELETE", resp.url, true);
|
||||
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
|
||||
xhr.onreadystatechange = function (file) {
|
||||
if (xhr.readyState == 4 && xhr.status === 200) {
|
||||
var text = document.createTextNode("Deleted ");
|
||||
file.fileLabel.insertBefore(text, file.fileLabelLink);
|
||||
file.fileLabel.className = "deleted";
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
}
|
||||
}.bind(this, file);
|
||||
xhr.send();
|
||||
});
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
file.cancelActionElement = deleteAction;
|
||||
file.fileActions.appendChild(deleteAction);
|
||||
},
|
||||
canceled: function (file) {
|
||||
this.options.error(file);
|
||||
},
|
||||
error: function (file, resp, xhrO) {
|
||||
deleteAction.addEventListener('click', function(ev) {
|
||||
xhr = new XMLHttpRequest();
|
||||
xhr.open("DELETE", resp.url, true);
|
||||
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
|
||||
xhr.onreadystatechange = function(file) {
|
||||
if (xhr.readyState == 4 && xhr.status === 200) {
|
||||
var text = document.createTextNode("Deleted ");
|
||||
file.fileLabel.insertBefore(text, file.fileLabelLink);
|
||||
file.fileLabel.className = "deleted";
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
}
|
||||
}.bind(this, file);
|
||||
xhr.send();
|
||||
});
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
file.cancelActionElement = deleteAction;
|
||||
file.fileActions.appendChild(deleteAction);
|
||||
},
|
||||
error: function(file, resp, xhrO) {
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
file.fileActions.removeChild(file.progressElement);
|
||||
|
||||
if (file.status === "canceled") {
|
||||
file.fileLabel.innerHTML = file.name + ": Canceled ";
|
||||
}
|
||||
else {
|
||||
if (resp.error) {
|
||||
file.fileLabel.innerHTML = file.name + ": " + resp.error;
|
||||
}
|
||||
else if (resp.includes("<html")) {
|
||||
file.fileLabel.innerHTML = file.name + ": Server Error";
|
||||
}
|
||||
else {
|
||||
file.fileLabel.innerHTML = file.name + ": " + resp;
|
||||
}
|
||||
}
|
||||
file.fileLabel.className = "error";
|
||||
},
|
||||
if (file.status === "canceled") {
|
||||
file.fileLabel.innerHTML = file.name + ": Canceled ";
|
||||
}
|
||||
else {
|
||||
if (resp.error) {
|
||||
file.fileLabel.innerHTML = file.name + ": " + resp.error;
|
||||
}
|
||||
else if (resp.includes("<html")) {
|
||||
file.fileLabel.innerHTML = file.name + ": Server Error";
|
||||
}
|
||||
else {
|
||||
file.fileLabel.innerHTML = file.name + ": " + resp;
|
||||
}
|
||||
}
|
||||
file.fileLabel.className = "error";
|
||||
},
|
||||
|
||||
autoProcessQueue: document.getElementById("dropzone").getAttribute("data-auth") !== "basic",
|
||||
maxFilesize: Math.round(parseInt(document.getElementById("dropzone").getAttribute("data-maxsize"), 10) / 1024 / 1024),
|
||||
previewsContainer: "#uploads",
|
||||
parallelUploads: 5,
|
||||
headers: { "Accept": "application/json" },
|
||||
dictDefaultMessage: "Click or Drop file(s) or Paste image",
|
||||
dictFallbackMessage: ""
|
||||
};
|
||||
|
||||
document.onpaste = function (event) {
|
||||
var items = (event.clipboardData || event.originalEvent.clipboardData).items;
|
||||
for (index in items) {
|
||||
var item = items[index];
|
||||
if (item.kind === "file") {
|
||||
Dropzone.forElement("#dropzone").addFile(item.getAsFile());
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
document.getElementById("access_key_checkbox").onchange = function (event) {
|
||||
if (event.target.checked) {
|
||||
document.getElementById("access_key_input").style.display = "inline-block";
|
||||
document.getElementById("access_key_text").style.display = "none";
|
||||
} else {
|
||||
document.getElementById("access_key_input").value = "";
|
||||
document.getElementById("access_key_input").style.display = "none";
|
||||
document.getElementById("access_key_text").style.display = "inline-block";
|
||||
}
|
||||
previewsContainer: "#uploads",
|
||||
parallelUploads: 5,
|
||||
headers: {"Accept": "application/json"},
|
||||
dictDefaultMessage: "Click or Drop file(s)",
|
||||
dictFallbackMessage: ""
|
||||
};
|
||||
|
||||
// @end-license
|
||||
|
|
17
templates.go
|
@ -8,7 +8,7 @@ import (
|
|||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
rice "github.com/GeertJohan/go.rice"
|
||||
"github.com/GeertJohan/go.rice"
|
||||
"github.com/flosch/pongo2"
|
||||
)
|
||||
|
||||
|
@ -51,8 +51,6 @@ func populateTemplatesMap(tSet *pongo2.TemplateSet, tMap map[string]*pongo2.Temp
|
|||
"401.html",
|
||||
"404.html",
|
||||
"oops.html",
|
||||
"access.html",
|
||||
"custom_page.html",
|
||||
|
||||
"display/audio.html",
|
||||
"display/image.html",
|
||||
|
@ -85,18 +83,7 @@ func renderTemplate(tpl *pongo2.Template, context pongo2.Context, r *http.Reques
|
|||
}
|
||||
|
||||
context["sitepath"] = Config.sitePath
|
||||
context["selifpath"] = Config.selifPath
|
||||
context["custom_pages_names"] = customPagesNames
|
||||
|
||||
var a string
|
||||
if Config.authFile == "" {
|
||||
a = "none"
|
||||
} else if Config.basicAuth {
|
||||
a = "basic"
|
||||
} else {
|
||||
a = "header"
|
||||
}
|
||||
context["auth"] = a
|
||||
context["using_auth"] = Config.authFile != ""
|
||||
|
||||
return tpl.ExecuteWriter(context, writer)
|
||||
}
|
||||
|
|
|
@ -1,9 +1,5 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - 404 Not Found{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="error-404">
|
||||
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
|
||||
</div>
|
||||
{% endblock %}
|
||||
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,110 +1,97 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - API{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<link href="{{ sitepath }}static/css/github-markdown.css" rel="stylesheet" type="text/css">
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main">
|
||||
<div id='inner_content'>
|
||||
<div id='inner_content'>
|
||||
<div class="normal markdown-body">
|
||||
|
||||
<h2>API</h2>
|
||||
|
||||
<h3>Client</h3>
|
||||
<p>To simplify uploading and deleting files, you can use <a target="_blank"
|
||||
href="https://github.com/andreimarcu/linx-client">linx-client</a>, which uses this API.</p>
|
||||
<p>To simplify uploading and deleting files, you can use <a target="_blank" href="https://github.com/andreimarcu/linx-client">linx-client</a>, which uses this API.</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
{% if using_auth %}
|
||||
<h3>Keys</h3>
|
||||
<p>This instance uses API Keys, therefore you will need to provide a key for uploading and deleting
|
||||
files.<br /> To do so, add the <code>Linx-Api-Key</code> header with your key.</p>
|
||||
<p>This instance uses API Keys, therefore you will need to provide a key for uploading and deleting files.<br/> To do so, add the <code>Linx-Api-Key</code> header with your key.</p>
|
||||
{% endif %}
|
||||
|
||||
<h3>Uploading a file</h3>
|
||||
|
||||
<p>To upload a file, make a PUT request to <code>{{ siteurl }}upload/</code> and you will get the url of
|
||||
your upload back.</p>
|
||||
<p>To upload a file, make a PUT request to <code>{{ siteurl }}upload/</code> and you will get the url of your upload back.</p>
|
||||
|
||||
<p><strong>Optional headers with the request</strong></p>
|
||||
|
||||
{% if not forcerandom %}
|
||||
<p>Randomize the filename<br />
|
||||
<code>Linx-Randomize: yes</code></p>
|
||||
{% endif %}
|
||||
<p>Randomize the filename<br/>
|
||||
<code>Linx-Randomize: yes</code></p>
|
||||
|
||||
<p>Specify a custom deletion key<br />
|
||||
<code>Linx-Delete-Key: mysecret</code></p>
|
||||
<p>Specify a custom deletion key<br/>
|
||||
<code>Linx-Delete-Key: mysecret</code></p>
|
||||
|
||||
<p>Protect file with password<br />
|
||||
<code>Linx-Access-Key: mysecret</code></p>
|
||||
<p>Specify an expiration time (in seconds)<br/>
|
||||
<code>Linx-Expiry: 60</code></p>
|
||||
|
||||
<p>Specify an expiration time (in seconds)<br />
|
||||
<code>Linx-Expiry: 60</code></p>
|
||||
|
||||
<p>Get a json response<br />
|
||||
<code>Accept: application/json</code></p>
|
||||
<p>Get a json response<br/>
|
||||
<code>Accept: application/json</code></p>
|
||||
|
||||
<p>The json response will then contain:</p>
|
||||
|
||||
<blockquote>
|
||||
<p>“url”: the publicly available upload url<br />
|
||||
“direct_url”: the url to access the file directly<br />
|
||||
“filename”: the (optionally generated) filename<br />
|
||||
“delete_key”: the (optionally generated) deletion key,<br />
|
||||
“access_key”: the (optionally supplied) access key,<br />
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br />
|
||||
“size”: the size in bytes of the file<br />
|
||||
“mimetype”: the guessed mimetype of the file<br />
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
<p>“url”: the publicly available upload url<br/>
|
||||
“filename”: the (optionally generated) filename<br/>
|
||||
“delete_key”: the (optionally generated) deletion key,<br/>
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
|
||||
“size”: the size in bytes of the file<br/>
|
||||
“mimetype”: the guessed mimetype of the file<br/>
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
</blockquote>
|
||||
|
||||
<p><strong>Examples</strong></p>
|
||||
|
||||
<p>Uploading myphoto.jpg</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
{% if using_auth %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}7z4h4ut.jpg{% endif %}</code></pre>
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% else %}
|
||||
<pre><code>$ curl -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}wtq7pan.jpg{% endif %}</code></pre>
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% endif %}
|
||||
|
||||
<p>Uploading myphoto.jpg with an expiry of 20 minutes</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
{% if using_auth %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Expiry: 1200" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}jm295snf.jpg{% endif %}</code></pre>
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% else %}
|
||||
<pre><code>$ curl -H "Linx-Expiry: 1200" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}1doym9u2.jpg{% endif %}</code></pre>
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% endif %}
|
||||
|
||||
<p>Uploading myphoto.jpg with a random filename and getting a json response:</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Accept: application/json"{% if not forcerandom %} -H "Linx-Randomize: yes"{% endif %} -T myphoto.jpg {{ siteurl }}upload/
|
||||
{% if using_auth %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Accept: application/json" -H "Linx-Randomize: yes" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{"delete_key":"...","expiry":"0","filename":"f34h4iu.jpg","mimetype":"image/jpeg",
|
||||
"sha256sum":"...","size":"...","url":"{{ siteurl }}f34h4iu.jpg"}</code></pre>
|
||||
{% else %}
|
||||
<pre><code>$ curl -H "Accept: application/json"{% if not forcerandom %} -H "Linx-Randomize: yes"{% endif %} -T myphoto.jpg {{ siteurl }}upload/
|
||||
<pre><code>$ curl -H "Accept: application/json" -H "Linx-Randomize: yes" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{"delete_key":"...","expiry":"0","filename":"f34h4iu.jpg","mimetype":"image/jpeg",
|
||||
"sha256sum":"...","size":"...","url":"{{ siteurl }}f34h4iu.jpg"}</code></pre>
|
||||
{% endif %}
|
||||
|
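The PUT upload API documented above (optional Linx-* headers, JSON response when Accept: application/json is sent) can also be exercised from Go. A hedged sketch follows; the instance URL, file name, and expiry value are placeholders, and the JSON field names are taken from the response list above.

package main

import (
	"encoding/json"
	"fmt"
	"log"
	"net/http"
	"os"
)

// Only the response fields used here, named after the API description above.
type uploadResp struct {
	URL       string `json:"url"`
	Filename  string `json:"filename"`
	DeleteKey string `json:"delete_key"`
	Expiry    string `json:"expiry"`
}

func main() {
	f, err := os.Open("myphoto.jpg") // placeholder file
	if err != nil {
		log.Fatal(err)
	}
	defer f.Close()

	// Placeholder instance URL.
	req, err := http.NewRequest("PUT", "https://example.com/upload/myphoto.jpg", f)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Accept", "application/json")
	req.Header.Set("Linx-Expiry", "1200") // expire in 20 minutes
	// req.Header.Set("Linx-Api-Key", "mysecretkey") // if the instance uses API keys

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	var r uploadResp
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		log.Fatal(err)
	}
	fmt.Println(r.URL, r.DeleteKey)
}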
||||
<h3>Overwriting a file</h3>
|
||||
|
||||
<p>To overwrite a file you uploaded, simply provide the <code>Linx-Delete-Key</code> header with the
|
||||
original file's deletion key.</p>
|
||||
<p>To overwrite a file you uploaded, simply provide the <code>Linx-Delete-Key</code> header with the original file's deletion key.</p>
|
||||
|
||||
<p><strong>Example</p></strong>
|
||||
|
||||
<p>To overwrite myphoto.jpg</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
{% if using_auth %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Delete-Key: mysecret" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% else %}
|
||||
|
@ -114,14 +101,13 @@
|
|||
|
||||
<h3>Deleting a file</h3>
|
||||
|
||||
<p>To delete a file you uploaded, make a DELETE request to <code>{{ siteurl }}yourfile.ext</code> with the
|
||||
delete key set as the <code>Linx-Delete-Key</code> header.</p>
|
||||
<p>To delete a file you uploaded, make a DELETE request to <code>{{ siteurl }}yourfile.ext</code> with the delete key set as the <code>Linx-Delete-Key</code> header.</p>
|
||||
|
||||
<p><strong>Example</strong></p>
|
||||
|
||||
<p>To delete myphoto.jpg</p>
|
||||
|
||||
{% if auth != "none" %}
|
||||
{% if using_auth %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Delete-Key: mysecret" -X DELETE {{ siteurl }}myphoto.jpg
|
||||
DELETED</code></pre>
|
||||
{% else %}
|
||||
|
@ -131,17 +117,15 @@ DELETED</code></pre>
|
|||
|
||||
<h3>Information about a file</h3>
|
||||
|
||||
<p>To retrieve information about a file, make a GET request the public url with
|
||||
<code>Accept: application/json</code> headers and you will receive a json response containing:</p>
|
||||
<p>To retrieve information about a file, make a GET request the public url with <code>Accept: application/json</code> headers and you will receive a json response containing:</p>
|
||||
|
||||
<blockquote>
|
||||
<p>“url”: the publicly available upload url<br />
|
||||
“direct_url”: the url to access the file directly<br />
|
||||
“filename”: the (optionally generated) filename<br />
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br />
|
||||
“size”: the size in bytes of the file<br />
|
||||
“mimetype”: the guessed mimetype of the file<br />
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
<p>“url”: the publicly available upload url<br/>
|
||||
“filename”: the (optionally generated) filename<br/>
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
|
||||
“size”: the size in bytes of the file<br/>
|
||||
“mimetype”: the guessed mimetype of the file<br/>
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
</blockquote>
|
||||
|
||||
<p><strong>Example</strong></p>
|
||||
|
@ -149,6 +133,6 @@ DELETED</code></pre>
|
|||
<pre><code>$ curl -H "Accept: application/json" {{ siteurl }}myphoto.jpg
|
||||
{"expiry":"0","filename":"myphoto.jpg","mimetype":"image/jpeg","sha256sum":"...","size":"..."}</code></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - Password protected file{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main" class="oopscontent">
|
||||
<form action="{{ unlockpath }}" method="POST" enctype="multipart/form-data">
|
||||
{{ filename }} is protected with a password: <br /><br />
|
||||
<input name="access_key" type="password" />
|
||||
<input id="submitbtn" type="submit" value="Unlock">
|
||||
<br /><br />
|
||||
</form>
|
||||
</div>
|
||||
{% endblock %}
|
|
@ -1,11 +1,9 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>{% block title %}{{ sitename }}{% endblock %}</title>
|
||||
<meta charset='utf-8' content='text/html' http-equiv='content-type'>
|
||||
<meta name='viewport' content='width=device-width, initial-scale=0.8'>
|
||||
<link href='{{ sitepath }}static/css/linx.css?v=1' media='screen, projection' rel='stylesheet' type='text/css'>
|
||||
<link href='{{ sitepath }}static/css/linx.css' media='screen, projection' rel='stylesheet' type='text/css'>
|
||||
<link href='{{ sitepath }}static/css/hint.css' rel='stylesheet' type='text/css'>
|
||||
<link href='{{ sitepath }}static/images/favicon.gif' rel='icon' type='image/gif'>
|
||||
{% block head %}{% endblock %}
|
||||
|
@ -16,26 +14,22 @@
|
|||
<div id="container">
|
||||
<div id="header">
|
||||
<div id="navigation" class="right">
|
||||
{% if auth != "header" %}
|
||||
<a href="{{ sitepath }}">Upload</a> |
|
||||
<a href="{{ sitepath }}paste/">Paste</a> |
|
||||
{% if !using_auth %}
|
||||
<a href="{{ sitepath }}">Upload</a> |
|
||||
<a href="{{ sitepath }}paste/">Paste</a> |
|
||||
{% endif %}
|
||||
<a href="{{ sitepath }}API/">API</a>
|
||||
{% for custom_file_name, custom_page_name in custom_pages_names sorted %}
|
||||
| <a href="{{ sitepath }}{{ custom_file_name }}/">{{ custom_page_name }}</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<h2><a href="{{ sitepath }}" title="{{ sitename }}">{{ sitename }}</a></h2>
|
||||
<h2><a href="{{ sitepath }}" title="{{ sitename }}">{{ sitename }}</a></h2>
|
||||
</div>
|
||||
|
||||
{% block content %}{% endblock %}
|
||||
|
||||
<div id="footer">
|
||||
<a href="https://github.com/andreimarcu/linx-server">linx-server</a>
|
||||
<a href="https://github.com/andreimarcu/linx-server">linx</a>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
</html>
|
||||
|
|
|
@ -1,19 +0,0 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - {{ pagename }}{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<link href="{{ sitepath }}static/css/github-markdown.css" rel="stylesheet" type="text/css">
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main">
|
||||
<div id='inner_content'>
|
||||
<div class="normal markdown-body">
|
||||
<h2>{{ pagename }}</h2>
|
||||
|
||||
{{ contents|safe }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
|
@@ -1,12 +1,9 @@
{% extends "base.html" %}

{% block head %}
<meta property="og:audio" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
{% endblock %}

{% block main %}
<audio class="display-audio" controls preload='auto'>
<source src='{{ sitepath }}{{ selifpath }}{{ filename }}'>
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
<source src='{{ sitepath }}selif/{{ filename }}'>
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
</audio>
{% endblock %}
{% endblock %}
@ -1,36 +1,50 @@
|
|||
{% extends "../base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - {{ filename }}{% endblock %}
|
||||
{% block title %}{{ filename }}{% endblock %}
|
||||
|
||||
{% block bodymore %}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<div id="info" class="dinfo info-flex">
|
||||
<div id="filename">
|
||||
{{ filename }}
|
||||
<div id="info" class="dinfo">
|
||||
<div class="float-left" id="filename">
|
||||
{{ filename }}
|
||||
</div>
|
||||
|
||||
<div class="right">
|
||||
{% if expiry %}
|
||||
<span>file expires in {{ expiry }}</span> |
|
||||
{% endif %}
|
||||
{% block infomore %}{% endblock %}
|
||||
<span>{{ size }}</span> |
|
||||
{% if shorturlEnabled %}
|
||||
{% if shorturl %}
|
||||
<a class="hint--top" aria-label="Click to copy into clipboard" id="shorturl"
|
||||
style="cursor: pointer;" href="{{shorturl}}">{{shorturl}}</a> |
|
||||
{% else %}
|
||||
<a class="hint--top" aria-label="Click to retrieve shortened url" id="shorturl"
|
||||
data-url="{{ sitepath }}{{filename}}/short" style="cursor: pointer;">short url</a> |
|
||||
{% endif %}
|
||||
{% endif %}
|
||||
<a href="{{ filename }}/torrent" download>torrent</a> |
|
||||
<a href="{{ sitepath }}selif/{{ filename }}" download>get</a>
|
||||
</div>
|
||||
|
||||
{% block infoleft %}{% endblock %}
|
||||
<div class="clear"></div>
|
||||
</div>
|
||||
|
||||
<div class="info-actions">
|
||||
{% if expiry %}
|
||||
<span>file expires in {{ expiry }}</span> |
|
||||
{% endif %}
|
||||
{% block infomore %}{% endblock %}
|
||||
<span>{{ size }}</span> |
|
||||
<a href="{{ filename }}/torrent" download>torrent</a> |
|
||||
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" download>get</a>
|
||||
<div id="main" {% block mainmore %}{% endblock %}>
|
||||
|
||||
<div id='inner_content' {% block innercontentmore %}{% endblock %} >
|
||||
{% block main %}{% endblock %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
{% block infoleft %}{% endblock %}
|
||||
</div>
|
||||
<script src="{{ sitepath }}static/js/clipboard.js"></script>
|
||||
|
||||
<div id="main" {% block mainmore %}{% endblock %}>
|
||||
|
||||
<div id='inner_content' {% block innercontentmore %}{% endblock %}>
|
||||
{% block main %}{% endblock %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<script src="{{ sitepath }}static/js/clipboard.js"></script>
|
||||
{% endblock %}
|
||||
{% if shorturlEnabled %}
|
||||
<script src="{{ sitepath }}static/js/shorturl.js"></script>
|
||||
{% endif %}
|
||||
{% endblock %}
|
||||
|
|
|
@ -11,34 +11,37 @@
|
|||
|
||||
{% block infoleft %}
|
||||
<div id="editform">
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div class="info-flex">
|
||||
<div>
|
||||
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
|
||||
</div>
|
||||
<div class="info-actions">
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled>Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit" id="save">Save</button>
|
||||
</div>
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post' >
|
||||
<div class="right">
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled=disabled>Expires:</option>
|
||||
<option value="0">never</option>
|
||||
<option value="60">a minute</option>
|
||||
<option value="300">5 minutes</option>
|
||||
<option value="3600">an hour</option>
|
||||
<option value="86400">a day</option>
|
||||
<option value="604800">a week</option>
|
||||
<option value="2419200">a month</option>
|
||||
<option value="29030400">a year</option>
|
||||
</select>
|
||||
|
||||
<button id="save">save</button>
|
||||
</div>
|
||||
|
||||
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="{{ extra.extension }}" placeholder="txt">
|
||||
<textarea name='content' id="newcontent" class="editor"></textarea>
|
||||
</form>
|
||||
</div>
|
||||
{% endblock %}
|
||||
|
||||
{% block infomore %}
|
||||
{%block infomore %}
|
||||
<label>wrap <input id="wordwrap" type="checkbox" checked></label> |
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<div id="normal-content" class="normal fixed">
|
||||
<pre id="normal-code"><code id="codeb" class="{{ extra.lang_hl }}">{{ extra.contents }}</code></pre>
|
||||
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
|
||||
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
|
||||
</div>
|
||||
|
||||
|
||||
|
@ -48,5 +51,5 @@
|
|||
{% endif %}
|
||||
|
||||
<script src="{{ sitepath }}static/js/util.js"></script>
|
||||
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
|
||||
<script src="{{ sitepath }}static/js/bin.js"></script>
|
||||
{% endblock %}
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
{% block main %}
|
||||
<div class="normal display-file">
|
||||
<p class="center">You are requesting <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">{{ filename }}</a>, <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here</a> to download.</p>
|
||||
<p class="center">You are requesting <a href="{{ sitepath }}selif/{{ filename }}">{{ filename }}</a>, <a href="{{ sitepath }}selif/{{ filename }}">click here</a> to download.</p>
|
||||
|
||||
{% if files|length > 0 %}
|
||||
<p>Contents of the archive:</p>
|
||||
|
|
|
@ -1,11 +1,7 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<meta property="og:image" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}">
|
||||
<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
<a href="{{ sitepath }}selif/{{ filename }}">
|
||||
<img class="display-image" src="{{ sitepath }}selif/{{ filename }}" />
|
||||
</a>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block main %}
|
||||
<object class="display-pdf" data="{{ sitepath }}{{ selifpath }}{{ filename }}" type="application/pdf">
|
||||
<object class="display-pdf" data="{{ sitepath }}selif/{{ filename }}" type="application/pdf">
|
||||
|
||||
<p>It appears your Web browser is not configured to display PDF files.
|
||||
No worries, just <a href="{{ sitepath }}{{ selifpath }}{{ filename }}">click here to download the PDF file.</a></p>
|
||||
No worries, just <a href="{{ sitepath }}selif/{{ filename }}">click here to download the PDF file.</a></p>
|
||||
|
||||
</object>
|
||||
{% endblock %}
|
||||
|
|
|
@ -9,22 +9,24 @@
|
|||
|
||||
{% block infoleft %}
|
||||
<div id="editform">
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div class="info-flex">
|
||||
<div>
|
||||
{% if not forcerandom %}<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename">{% endif %}.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
|
||||
</div>
|
||||
<div class="info-actions">
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled>Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit" id="save">Save</button>
|
||||
</div>
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post' >
|
||||
<div class="right">
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled=disabled>Expires:</option>
|
||||
<option value="0">never</option>
|
||||
<option value="60">a minute</option>
|
||||
<option value="300">5 minutes</option>
|
||||
<option value="3600">an hour</option>
|
||||
<option value="86400">a day</option>
|
||||
<option value="604800">a week</option>
|
||||
<option value="2419200">a month</option>
|
||||
<option value="29030400">a year</option>
|
||||
</select>
|
||||
|
||||
<button id="save">save</button>
|
||||
</div>
|
||||
|
||||
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)">.<input id="extension" class="codebox" name='extension' type='text' value="story" placeholder="txt">
|
||||
<textarea name='content' id="newcontent" class="editor"></textarea>
|
||||
</form>
|
||||
</div>
|
||||
|
@ -37,10 +39,10 @@
|
|||
{% block main %}
|
||||
<div id="normal-content" class="normal">
|
||||
<pre id="normal-code"><code id="codeb" class="story">{% for line in lines %}{% if line|make_list|first == ">" %}<span class="storygreen">{{ line }}</span>{% else %}<span class="storyred">{{ line }}</span>{% endif %}{% endfor %}</code></pre>
|
||||
<textarea id="inplace-editor" class="editor">{{ extra.contents }}</textarea>
|
||||
<textarea id="editor" class="editor">{{ extra.contents }}</textarea>
|
||||
</div>
|
||||
|
||||
|
||||
<script src="{{ sitepath }}static/js/util.js"></script>
|
||||
<script src="{{ sitepath }}static/js/bin.js?v=1"></script>
|
||||
<script src="{{ sitepath }}static/js/bin.js"></script>
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,12 +1,8 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<meta property="og:video" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<video class="display-video" controls autoplay>
|
||||
<source src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
|
||||
<source src="{{ sitepath }}selif/{{ filename }}"/>
|
||||
<a href='{{ sitepath }}selif/{{ filename }}'>Download it instead</a>
|
||||
</video>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
|
|
@ -4,46 +4,29 @@
|
|||
<link href='{{ sitepath }}static/css/dropzone.css' media='screen, projection' rel='stylesheet' type='text/css'>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% block content %}
|
||||
<div id="fileupload">
|
||||
<form action="{{ sitepath }}upload" class="dropzone" id="dropzone" method="POST" enctype="multipart/form-data"
|
||||
data-maxsize="{{ maxsize }}" data-auth="{{ auth }}">
|
||||
<form action="{{ sitepath }}upload" class="dropzone" id="dropzone" method="POST" enctype="multipart/form-data" data-maxsize="{{ maxsize }}">
|
||||
<div class="fallback">
|
||||
<input id="fileinput" name="file" type="file" /><br />
|
||||
<input id="submitbtn" type="submit" value="Upload">
|
||||
</div>
|
||||
|
||||
<div id="dzone" class="dz-default dz-message">
|
||||
<span>Click or Drop file(s) or Paste image</span>
|
||||
<span>Click or Drop file(s)</span>
|
||||
</div>
|
||||
|
||||
<div id="choices">
|
||||
<span class="hint--top hint--bounce"
|
||||
data-hint="Replace the filename with random characters. The file extension is retained">
|
||||
<label><input {% if forcerandom %} disabled {% endif %} name="randomize" id="randomize" type="checkbox"
|
||||
checked /> Randomize filename</label>
|
||||
</span>
|
||||
|
||||
<div id="expiry">
|
||||
<label>File expiry:
|
||||
<select name="expires" id="expires">
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}" {% if forloop.Last %} selected{% endif %}>
|
||||
{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<label>File expiry:
|
||||
<select name="expires" id="expires">
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<div id="access_key">
|
||||
<span class="hint--top hint--bounce"
|
||||
data-hint="Require password to access (this does not encrypt the file but only limits access)">
|
||||
<label>
|
||||
<input type="checkbox" id="access_key_checkbox" />
|
||||
<span id="access_key_text">Require access password</span>
|
||||
</label>
|
||||
<input id="access_key_input" name="access_key" type="text" placeholder="Access password" />
|
||||
</span>
|
||||
</div>
|
||||
<label><input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename</label>
|
||||
</div>
|
||||
<div class="clear"></div>
|
||||
</form>
|
||||
|
@ -53,4 +36,4 @@
|
|||
|
||||
<script src="{{ sitepath }}static/js/dropzone.js"></script>
|
||||
<script src="{{ sitepath }}static/js/upload.js"></script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
|
|
@ -1,40 +1,29 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - Paste{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div id="main" class="paste">
|
||||
<div id="info" class="info-flex">
|
||||
<div>
|
||||
{% if not forcerandom %}<span class="hint--top hint--bounce"
|
||||
data-hint="Leave empty for random filename"><input class="codebox" name='filename' id="filename"
|
||||
type='text' value="" placeholder="filename" /></span>{% endif %}.<span
|
||||
class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input
|
||||
id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
|
||||
</div>
|
||||
<div>
|
||||
<span class="hint--top hint--bounce" data-hint="Require password to access (leave empty to disable)">
|
||||
<input class="codebox" name="access_key" type="text" placeholder="password" />
|
||||
</span>
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div id="main">
|
||||
<div id="info" class="ninfo">
|
||||
<input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename (empty for random filename)" />.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
|
||||
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled>Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}" {% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}
|
||||
</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit">Paste</button>
|
||||
<div class="right">
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled="disabled">Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
|
||||
<input type="submit" value="Paste">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="inner_content">
|
||||
<textarea name='content' id="content" class="editor"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div id="inner_content" class="padme">
|
||||
<textarea name='content' id="content" class="editor"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
</form>
|
||||
|
||||
<script src="{{ sitepath }}static/js/util.js"></script>
|
||||
<script src="{{ sitepath }}static/js/paste.js"></script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
||||
|
|
69
torrent.go
69
torrent.go
|
@ -2,44 +2,64 @@ package main
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/sha1"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/expiry"
|
||||
"github.com/andreimarcu/linx-server/torrent"
|
||||
"github.com/zeebo/bencode"
|
||||
"github.com/zenazn/goji/web"
|
||||
)
|
||||
|
||||
func createTorrent(fileName string, f io.Reader, r *http.Request) ([]byte, error) {
|
||||
url := getSiteURL(r) + Config.selifPath + fileName
|
||||
chunk := make([]byte, torrent.TORRENT_PIECE_LENGTH)
|
||||
const (
|
||||
TORRENT_PIECE_LENGTH = 262144
|
||||
)
|
||||
|
||||
t := torrent.Torrent{
|
||||
type TorrentInfo struct {
|
||||
PieceLength int `bencode:"piece length"`
|
||||
Pieces string `bencode:"pieces"`
|
||||
Name string `bencode:"name"`
|
||||
Length int `bencode:"length"`
|
||||
}
|
||||
|
||||
type Torrent struct {
|
||||
Encoding string `bencode:"encoding"`
|
||||
Info TorrentInfo `bencode:"info"`
|
||||
UrlList []string `bencode:"url-list"`
|
||||
}
|
||||
|
||||
func hashPiece(piece []byte) []byte {
|
||||
h := sha1.New()
|
||||
h.Write(piece)
|
||||
return h.Sum(nil)
|
||||
}
|
||||
|
||||
func createTorrent(fileName string, f io.ReadCloser, r *http.Request) ([]byte, error) {
|
||||
chunk := make([]byte, TORRENT_PIECE_LENGTH)
|
||||
|
||||
torrent := Torrent{
|
||||
Encoding: "UTF-8",
|
||||
Info: torrent.TorrentInfo{
|
||||
PieceLength: torrent.TORRENT_PIECE_LENGTH,
|
||||
Info: TorrentInfo{
|
||||
PieceLength: TORRENT_PIECE_LENGTH,
|
||||
Name: fileName,
|
||||
},
|
||||
UrlList: []string{url},
|
||||
UrlList: []string{fmt.Sprintf("%sselif/%s", getSiteURL(r), fileName)},
|
||||
}
|
||||
|
||||
for {
|
||||
n, err := io.ReadFull(f, chunk)
|
||||
n, err := f.Read(chunk)
|
||||
if err == io.EOF {
|
||||
break
|
||||
} else if err != nil && err != io.ErrUnexpectedEOF {
|
||||
} else if err != nil {
|
||||
return []byte{}, err
|
||||
}
|
||||
|
||||
t.Info.Length += n
|
||||
t.Info.Pieces += string(torrent.HashPiece(chunk[:n]))
|
||||
torrent.Info.Length += n
|
||||
torrent.Info.Pieces += string(hashPiece(chunk[:n]))
|
||||
}
|
||||
|
||||
data, err := bencode.EncodeBytes(&t)
|
||||
data, err := bencode.EncodeBytes(&torrent)
|
||||
if err != nil {
|
||||
return []byte{}, err
|
||||
}
|
||||
|
@ -50,25 +70,22 @@ func createTorrent(fileName string, f io.Reader, r *http.Request) ([]byte, error
|
|||
func fileTorrentHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
fileName := c.URLParams["name"]
|
||||
|
||||
metadata, f, err := storageBackend.Get(fileName)
|
||||
if err == backends.NotFoundErr {
|
||||
err := checkFile(fileName)
|
||||
if err == NotFoundErr {
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
} else if err == backends.BadMetadata {
|
||||
} else if err == BadMetadata {
|
||||
oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
|
||||
return
|
||||
} else if err != nil {
|
||||
oopsHandler(c, w, r, RespAUTO, err.Error())
|
||||
}
|
||||
|
||||
f, err := fileBackend.Open(fileName)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "Could not create torrent.")
|
||||
return
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
if expiry.IsTsExpired(metadata.Expiry) {
|
||||
storageBackend.Delete(fileName)
|
||||
notFoundHandler(c, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
encoded, err := createTorrent(fileName, f, r)
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "Could not create torrent.")
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
package torrent
|
||||
|
||||
import (
|
||||
"crypto/sha1"
|
||||
)
|
||||
|
||||
const (
|
||||
TORRENT_PIECE_LENGTH = 262144
|
||||
)
|
||||
|
||||
type TorrentInfo struct {
|
||||
PieceLength int `bencode:"piece length"`
|
||||
Pieces string `bencode:"pieces"`
|
||||
Name string `bencode:"name"`
|
||||
Length int `bencode:"length"`
|
||||
}
|
||||
|
||||
type Torrent struct {
|
||||
Encoding string `bencode:"encoding"`
|
||||
Info TorrentInfo `bencode:"info"`
|
||||
UrlList []string `bencode:"url-list"`
|
||||
}
|
||||
|
||||
func HashPiece(piece []byte) []byte {
|
||||
h := sha1.New()
|
||||
h.Write(piece)
|
||||
return h.Sum(nil)
|
||||
}
|
|
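The two variants of `createTorrent` shown above differ mainly in whether the SHA-1 piece helper lives in a separate `torrent` package (`torrent.HashPiece`) or locally (`hashPiece`), but the piece-hashing scheme is the same: read the payload in 262144-byte chunks and append the SHA-1 digest of each chunk to `Info.Pieces`. A standalone sketch of that loop, using only the standard library (the function and variable names here are illustrative, not the project's own):

```go
package main

import (
	"crypto/sha1"
	"fmt"
	"io"
	"strings"
)

// pieceLength matches TORRENT_PIECE_LENGTH from the diff above.
const pieceLength = 262144

// hashPieces reads r in pieceLength-sized chunks and returns the
// concatenated SHA-1 digests (the format the "pieces" field expects)
// plus the total number of bytes read.
func hashPieces(r io.Reader) (pieces string, length int, err error) {
	chunk := make([]byte, pieceLength)
	for {
		n, readErr := io.ReadFull(r, chunk)
		if readErr == io.EOF {
			break
		} else if readErr != nil && readErr != io.ErrUnexpectedEOF {
			return "", 0, readErr
		}
		sum := sha1.Sum(chunk[:n]) // one 20-byte digest per piece
		pieces += string(sum[:])
		length += n
	}
	return pieces, length, nil
}

func main() {
	pieces, length, err := hashPieces(strings.NewReader("example payload"))
	if err != nil {
		panic(err)
	}
	fmt.Printf("%d bytes hashed into %d piece digest(s)\n", length, len(pieces)/20)
}
```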
@ -5,13 +5,12 @@ import (
|
|||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/andreimarcu/linx-server/torrent"
|
||||
"github.com/zeebo/bencode"
|
||||
)
|
||||
|
||||
func TestCreateTorrent(t *testing.T) {
|
||||
fileName := "server.go"
|
||||
var decoded torrent.Torrent
|
||||
var decoded Torrent
|
||||
|
||||
f, err := os.Open("server.go")
|
||||
if err != nil {
|
||||
|
@ -46,14 +45,14 @@ func TestCreateTorrent(t *testing.T) {
|
|||
t.Fatal("Length was less than or equal to 0, expected more")
|
||||
}
|
||||
|
||||
tracker := fmt.Sprintf("%s%s%s", Config.siteURL, Config.selifPath, fileName)
|
||||
tracker := fmt.Sprintf("%sselif/%s", Config.siteURL, fileName)
|
||||
if decoded.UrlList[0] != tracker {
|
||||
t.Fatalf("First entry in URL list was %s, expected %s", decoded.UrlList[0], tracker)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCreateTorrentWithImage(t *testing.T) {
|
||||
var decoded torrent.Torrent
|
||||
var decoded Torrent
|
||||
|
||||
f, err := os.Open("static/images/404.jpg")
|
||||
if err != nil {
|
||||
|
@ -68,7 +67,7 @@ func TestCreateTorrentWithImage(t *testing.T) {
|
|||
|
||||
bencode.DecodeBytes(encoded, &decoded)
|
||||
|
||||
if decoded.Info.Pieces != "\xd6\xff\xbf'^)\x85?\xb4.\xb0\xc1|\xa3\x83\xeeX\xf9\xfd\xd7" {
|
||||
if decoded.Info.Pieces != "r\x01\x80j\x99\x84\n\xd3dZ;1NX\xec;\x9d$+f" {
|
||||
t.Fatal("Torrent pieces did not match expected pieces for image")
|
||||
}
|
||||
}
|
||||
|
|
185
upload.go

@@ -15,15 +15,11 @@ import (
"strings"
"time"

"github.com/andreimarcu/linx-server/auth/apikeys"
"github.com/andreimarcu/linx-server/backends"
"github.com/andreimarcu/linx-server/expiry"
"bitbucket.org/taruti/mimemagic"
"github.com/dchest/uniuri"
"github.com/gabriel-vasile/mimetype"
"github.com/zenazn/goji/web"
)

var FileTooLargeError = errors.New("File too large.")
var fileBlacklist = map[string]bool{
"favicon.ico": true,
"index.htm": true,
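One visible dependency change in the import block above is the move from `bitbucket.org/taruti/mimemagic` to `github.com/gabriel-vasile/mimetype` for sniffing uploads. A hedged sketch of how the newer library is typically used on a 512-byte header buffer follows; the fallback extension and the trimming of the leading dot mirror what `processUpload` does further down in this diff, while the sample buffer and printed output are illustrative.

```go
package main

import (
	"fmt"

	"github.com/gabriel-vasile/mimetype"
)

func main() {
	// In processUpload the first 512 bytes of the upload are read into
	// a header buffer; a tiny PNG signature stands in for that here.
	header := []byte("\x89PNG\r\n\x1a\n")

	kind := mimetype.Detect(header)

	// Fall back to a generic extension when the detector has nothing
	// useful, otherwise strip the leading "." as the handler does.
	extension := "file"
	if len(kind.Extension()) >= 2 {
		extension = kind.Extension()[1:]
	}
	fmt.Println(kind.String(), extension) // e.g. "image/png png"
}
```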
@@ -36,23 +32,21 @@
// Describes metadata directly from the user request
type UploadRequest struct {
src io.Reader
size int64
filename string
expiry time.Duration // Seconds until expiry, 0 = never
deleteKey string // Empty string if not defined
randomBarename bool
accessKey string // Empty string if not defined
deletionKey string // Empty string if not defined
}

// Metadata associated with a file as it would actually be stored
type Upload struct {
Filename string // Final filename on disk
Metadata backends.Metadata
Metadata Metadata
}

func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
if !strictReferrerCheck(r, getSiteURL(r), []string{"Linx-Delete-Key", "Linx-Expiry", "Linx-Randomize", "X-Requested-With"}) {
badRequestHandler(c, w, r, RespAUTO, "")
badRequestHandler(c, w, r)
return
}
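The referrer check above whitelists the `Linx-Delete-Key`, `Linx-Expiry` and `Linx-Randomize` headers, which is how API clients pass their options through to the upload handlers. A minimal client-side sketch in Go: only the header names come from the code in this diff; the target host, the `/upload/<name>` path, and the "yes" value for randomization are assumptions.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"strings"
)

func main() {
	// PUT the body to /upload/<name>; host and path are placeholders.
	req, err := http.NewRequest("PUT", "https://example.com/upload/notes.txt",
		strings.NewReader("hello from the API"))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Linx-Expiry", "3600")      // seconds until expiry, 0 = never
	req.Header.Set("Linx-Randomize", "yes")    // assumed value for randomized filenames
	req.Header.Set("Linx-Delete-Key", "mykey") // optional custom delete key
	req.Header.Set("Accept", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```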
@ -69,40 +63,32 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
defer file.Close()
|
||||
|
||||
r.ParseForm()
|
||||
if r.Form.Get("randomize") == "true" {
|
||||
upReq.randomBarename = true
|
||||
}
|
||||
upReq.expiry = parseExpiry(r.Form.Get("expires"))
|
||||
upReq.src = file
|
||||
upReq.size = headers.Size
|
||||
upReq.filename = headers.Filename
|
||||
} else {
|
||||
if r.PostFormValue("content") == "" {
|
||||
badRequestHandler(c, w, r, RespAUTO, "Empty file")
|
||||
if r.FormValue("content") == "" {
|
||||
oopsHandler(c, w, r, RespHTML, "Empty file")
|
||||
return
|
||||
}
|
||||
extension := r.PostFormValue("extension")
|
||||
extension := r.FormValue("extension")
|
||||
if extension == "" {
|
||||
extension = "txt"
|
||||
}
|
||||
|
||||
content := r.PostFormValue("content")
|
||||
|
||||
upReq.src = strings.NewReader(content)
|
||||
upReq.size = int64(len(content))
|
||||
upReq.filename = r.PostFormValue("filename") + "." + extension
|
||||
}
|
||||
|
||||
upReq.expiry = parseExpiry(r.PostFormValue("expires"))
|
||||
upReq.accessKey = r.PostFormValue(accessKeyParamName)
|
||||
|
||||
if r.PostFormValue("randomize") == "true" {
|
||||
upReq.randomBarename = true
|
||||
upReq.src = strings.NewReader(r.FormValue("content"))
|
||||
upReq.expiry = parseExpiry(r.FormValue("expires"))
|
||||
upReq.filename = r.FormValue("filename") + "." + extension
|
||||
}
|
||||
|
||||
upload, err := processUpload(upReq)
|
||||
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
if err == FileTooLargeError || err == backends.FileEmptyError {
|
||||
badRequestHandler(c, w, r, RespJSON, err.Error())
|
||||
return
|
||||
} else if err != nil {
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
@ -111,16 +97,14 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
|
||||
w.Write(js)
|
||||
} else {
|
||||
if err == FileTooLargeError || err == backends.FileEmptyError {
|
||||
badRequestHandler(c, w, r, RespHTML, err.Error())
|
||||
return
|
||||
} else if err != nil {
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespHTML, "Could not upload file: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
||||
http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
|
@ -129,15 +113,12 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
defer r.Body.Close()
|
||||
upReq.filename = c.URLParams["name"]
|
||||
upReq.src = http.MaxBytesReader(w, r.Body, Config.maxSize)
|
||||
upReq.src = r.Body
|
||||
|
||||
upload, err := processUpload(upReq)
|
||||
|
||||
if strings.EqualFold("application/json", r.Header.Get("Accept")) {
|
||||
if err == FileTooLargeError || err == backends.FileEmptyError {
|
||||
badRequestHandler(c, w, r, RespJSON, err.Error())
|
||||
return
|
||||
} else if err != nil {
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespJSON, "Could not upload file: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
@ -146,10 +127,7 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
w.Header().Set("Content-Type", "application/json; charset=UTF-8")
|
||||
w.Write(js)
|
||||
} else {
|
||||
if err == FileTooLargeError || err == backends.FileEmptyError {
|
||||
badRequestHandler(c, w, r, RespPLAIN, err.Error())
|
||||
return
|
||||
} else if err != nil {
|
||||
if err != nil {
|
||||
oopsHandler(c, w, r, RespPLAIN, "Could not upload file: "+err.Error())
|
||||
return
|
||||
}
|
||||
|
@ -160,22 +138,8 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
|
||||
if Config.remoteAuthFile != "" {
|
||||
key := r.FormValue("key")
|
||||
if key == "" && Config.basicAuth {
|
||||
_, password, ok := r.BasicAuth()
|
||||
if ok {
|
||||
key = password
|
||||
}
|
||||
}
|
||||
result, err := apikeys.CheckAuth(remoteAuthKeys, key)
|
||||
result, err := checkAuth(remoteAuthKeys, r.FormValue("key"))
|
||||
if err != nil || !result {
|
||||
if Config.basicAuth {
|
||||
rs := ""
|
||||
if Config.siteName != "" {
|
||||
rs = fmt.Sprintf(` realm="%s"`, Config.siteName)
|
||||
}
|
||||
w.Header().Set("WWW-Authenticate", `Basic`+rs)
|
||||
}
|
||||
unauthorizedHandler(c, w, r)
|
||||
return
|
||||
}
|
||||
|
@ -188,7 +152,6 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
|
||||
upReq := UploadRequest{}
|
||||
grabUrl, _ := url.Parse(r.FormValue("url"))
|
||||
directURL := r.FormValue("direct_url") == "yes"
|
||||
|
||||
resp, err := http.Get(grabUrl.String())
|
||||
if err != nil {
|
||||
|
@ -197,9 +160,8 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
}
|
||||
|
||||
upReq.filename = filepath.Base(grabUrl.Path)
|
||||
upReq.src = http.MaxBytesReader(w, resp.Body, Config.maxSize)
|
||||
upReq.deleteKey = r.FormValue("deletekey")
|
||||
upReq.accessKey = r.FormValue(accessKeyParamName)
|
||||
upReq.src = resp.Body
|
||||
upReq.deletionKey = r.FormValue("deletekey")
|
||||
upReq.randomBarename = r.FormValue("randomize") == "yes"
|
||||
upReq.expiry = parseExpiry(r.FormValue("expiry"))
|
||||
|
||||
|
@ -220,11 +182,7 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
|
|||
return
|
||||
}
|
||||
|
||||
if directURL {
|
||||
http.Redirect(w, r, Config.sitePath+Config.selifPath+upload.Filename, 303)
|
||||
} else {
|
||||
http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
|
||||
}
|
||||
http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -233,27 +191,20 @@ func uploadHeaderProcess(r *http.Request, upReq *UploadRequest) {
|
|||
upReq.randomBarename = true
|
||||
}
|
||||
|
||||
upReq.deleteKey = r.Header.Get("Linx-Delete-Key")
|
||||
upReq.accessKey = r.Header.Get(accessKeyHeaderName)
|
||||
upReq.deletionKey = r.Header.Get("Linx-Delete-Key")
|
||||
|
||||
// Get seconds until expiry. Non-integer responses never expire.
|
||||
expStr := r.Header.Get("Linx-Expiry")
|
||||
upReq.expiry = parseExpiry(expStr)
|
||||
|
||||
}
|
||||
|
||||
func processUpload(upReq UploadRequest) (upload Upload, err error) {
|
||||
if upReq.size > Config.maxSize {
|
||||
return upload, FileTooLargeError
|
||||
}
|
||||
|
||||
// Determine the appropriate filename
|
||||
// Determine the appropriate filename, then write to disk
|
||||
barename, extension := barePlusExt(upReq.filename)
|
||||
randomize := false
|
||||
|
||||
// Randomize the "barename" (filename without extension) if needed
|
||||
if upReq.randomBarename || len(barename) == 0 {
|
||||
barename = generateBarename()
|
||||
randomize = true
|
||||
}
|
||||
|
||||
var header []byte
|
||||
|
@ -262,60 +213,47 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
|
|||
header = make([]byte, 512)
|
||||
n, _ := upReq.src.Read(header)
|
||||
if n == 0 {
|
||||
return upload, backends.FileEmptyError
|
||||
return upload, errors.New("Empty file")
|
||||
}
|
||||
header = header[:n]
|
||||
|
||||
// Determine the type of file from header
|
||||
kind := mimetype.Detect(header)
|
||||
if len(kind.Extension()) < 2 {
|
||||
extension = "file"
|
||||
mimetype := mimemagic.Match("", header)
|
||||
|
||||
// If the mime type is in our map, use that
|
||||
// otherwise just use "ext"
|
||||
if val, exists := mimeToExtension[mimetype]; exists {
|
||||
extension = val
|
||||
} else {
|
||||
extension = kind.Extension()[1:] // remove leading "."
|
||||
extension = "ext"
|
||||
}
|
||||
}
|
||||
|
||||
upload.Filename = strings.Join([]string{barename, extension}, ".")
|
||||
upload.Filename = strings.Replace(upload.Filename, " ", "", -1)
|
||||
|
||||
fileexists, _ := storageBackend.Exists(upload.Filename)
|
||||
fileexists, _ := fileBackend.Exists(upload.Filename)
|
||||
|
||||
// Check if the delete key matches, in which case overwrite
|
||||
if fileexists {
|
||||
metad, merr := storageBackend.Head(upload.Filename)
|
||||
metad, merr := metadataRead(upload.Filename)
|
||||
if merr == nil {
|
||||
if upReq.deleteKey == metad.DeleteKey {
|
||||
if upReq.deletionKey == metad.DeleteKey {
|
||||
fileexists = false
|
||||
} else if Config.forceRandomFilename == true {
|
||||
// the file exists
|
||||
// the delete key doesn't match
|
||||
// force random filenames is enabled
|
||||
randomize = true
|
||||
}
|
||||
}
|
||||
} else if Config.forceRandomFilename == true {
|
||||
// the file doesn't exist
|
||||
// force random filenames is enabled
|
||||
randomize = true
|
||||
|
||||
// set fileexists to true to generate a new barename
|
||||
fileexists = true
|
||||
}
|
||||
|
||||
for fileexists {
|
||||
if randomize {
|
||||
barename = generateBarename()
|
||||
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
|
||||
if err != nil {
|
||||
barename = barename + "1"
|
||||
} else {
|
||||
counter, err := strconv.Atoi(string(barename[len(barename)-1]))
|
||||
if err != nil {
|
||||
barename = barename + "1"
|
||||
} else {
|
||||
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
|
||||
}
|
||||
barename = barename[:len(barename)-1] + strconv.Itoa(counter+1)
|
||||
}
|
||||
upload.Filename = strings.Join([]string{barename, extension}, ".")
|
||||
|
||||
fileexists, err = storageBackend.Exists(upload.Filename)
|
||||
fileexists, err = fileBackend.Exists(upload.Filename)
|
||||
}
|
||||
|
||||
if fileBlacklist[strings.ToLower(upload.Filename)] {
|
||||
|
@ -323,22 +261,31 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
|
|||
}
|
||||
|
||||
// Get the rest of the metadata needed for storage
|
||||
var fileExpiry time.Time
|
||||
var expiry time.Time
|
||||
if upReq.expiry == 0 {
|
||||
fileExpiry = expiry.NeverExpire
|
||||
expiry = neverExpire
|
||||
} else {
|
||||
fileExpiry = time.Now().Add(upReq.expiry)
|
||||
expiry = time.Now().Add(upReq.expiry)
|
||||
}
|
||||
|
||||
if upReq.deleteKey == "" {
|
||||
upReq.deleteKey = uniuri.NewLen(30)
|
||||
}
|
||||
|
||||
upload.Metadata, err = storageBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src), fileExpiry, upReq.deleteKey, upReq.accessKey)
|
||||
bytes, err := fileBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src))
|
||||
if err != nil {
|
||||
return upload, err
|
||||
} else if bytes > Config.maxSize {
|
||||
fileBackend.Delete(upload.Filename)
|
||||
return upload, errors.New("File too large")
|
||||
}
|
||||
|
||||
upload.Metadata, err = generateMetadata(upload.Filename, expiry, upReq.deletionKey)
|
||||
if err != nil {
|
||||
fileBackend.Delete(upload.Filename)
|
||||
return
|
||||
}
|
||||
err = metadataWrite(upload.Filename, &upload.Metadata)
|
||||
if err != nil {
|
||||
fileBackend.Delete(upload.Filename)
|
||||
return
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
@@ -349,10 +296,8 @@ func generateBarename() string {
func generateJSONresponse(upload Upload, r *http.Request) []byte {
js, _ := json.Marshal(map[string]string{
"url": getSiteURL(r) + upload.Filename,
"direct_url": getSiteURL(r) + Config.selifPath + upload.Filename,
"filename": upload.Filename,
"delete_key": upload.Metadata.DeleteKey,
"access_key": upload.Metadata.AccessKey,
"expiry": strconv.FormatInt(upload.Metadata.Expiry.Unix(), 10),
"size": strconv.FormatInt(upload.Metadata.Size, 10),
"mimetype": upload.Metadata.Mimetype,

@@ -400,14 +345,14 @@ func parseExpiry(expStr string) time.Duration {
if expStr == "" {
return time.Duration(Config.maxExpiry) * time.Second
} else {
fileExpiry, err := strconv.ParseUint(expStr, 10, 64)
expiry, err := strconv.ParseUint(expStr, 10, 64)
if err != nil {
return time.Duration(Config.maxExpiry) * time.Second
} else {
if Config.maxExpiry > 0 && (fileExpiry > Config.maxExpiry || fileExpiry == 0) {
fileExpiry = Config.maxExpiry
if Config.maxExpiry > 0 && expiry > Config.maxExpiry {
expiry = Config.maxExpiry
}
return time.Duration(fileExpiry) * time.Second
return time.Duration(expiry) * time.Second
}
}
}
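Both variants of `parseExpiry` above clamp the requested expiry to `Config.maxExpiry`; one of them additionally treats a requested value of 0 ("never") as exceeding the cap whenever a cap is configured. A self-contained sketch of that clamping rule follows, with the configuration value passed in explicitly since `Config` is package state in the real code.

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

// parseExpiry mirrors the clamping logic from the diff above: empty or
// unparseable input falls back to maxExpiry, and when a cap is set,
// 0 ("never") or anything above the cap is clamped to the cap.
func parseExpiry(expStr string, maxExpiry uint64) time.Duration {
	if expStr == "" {
		return time.Duration(maxExpiry) * time.Second
	}
	fileExpiry, err := strconv.ParseUint(expStr, 10, 64)
	if err != nil {
		return time.Duration(maxExpiry) * time.Second
	}
	if maxExpiry > 0 && (fileExpiry > maxExpiry || fileExpiry == 0) {
		fileExpiry = maxExpiry
	}
	return time.Duration(fileExpiry) * time.Second
}

func main() {
	const maxExpiry = 86400 // one-day cap, illustrative value
	fmt.Println(parseExpiry("3600", maxExpiry)) // 1h0m0s
	fmt.Println(parseExpiry("0", maxExpiry))    // clamped to 24h0m0s
	fmt.Println(parseExpiry("junk", maxExpiry)) // falls back to 24h0m0s
}
```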
915
util.go

@@ -1,10 +1,15 @@
package main

func extensionToHlLang(extension string) (hlExt string) {
func extensionToHlAndAceLangs(extension string) (hlExt, aceExt string) {
hlExt, exists := extensionToHl[extension]
if !exists {
hlExt = "text"
}

aceExt, exists = extensionToAce[extension]
if !exists {
aceExt = "text"
}
return
}

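The hunk that follows swaps the compact `extensionToHl` highlighting table for the much larger `mimeToExtension` table, but in both cases the callers do a plain map lookup with a hard-coded fallback. A compact sketch of that pattern (the two-entry map is a stand-in for the full tables; the fallback value "ext" is the one used by `processUpload` in this diff):

```go
package main

import "fmt"

// A stand-in for the full lookup tables in util.go; only the access
// pattern matters here, not the contents.
var mimeToExtension = map[string]string{
	"application/pdf":  "pdf",
	"application/json": "json",
}

// extensionFor mirrors how the table is consumed: use the mapped
// extension when the mime type is known, otherwise fall back to "ext".
func extensionFor(mimeType string) string {
	if ext, exists := mimeToExtension[mimeType]; exists {
		return ext
	}
	return "ext"
}

func main() {
	fmt.Println(extensionFor("application/pdf"))            // pdf
	fmt.Println(extensionFor("application/x-unknown-type")) // ext
}
```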
@ -13,63 +18,853 @@ func supportedBinExtension(extension string) bool {
|
|||
return exists
|
||||
}
|
||||
|
||||
var extensionToHl = map[string]string{
|
||||
"ahk": "autohotkey",
|
||||
"apache": "apache",
|
||||
"applescript": "applescript",
|
||||
"bas": "basic",
|
||||
"bash": "sh",
|
||||
"bat": "dos",
|
||||
"c": "cpp",
|
||||
"cfc": "coldfusion",
|
||||
"clj": "clojure",
|
||||
"cmake": "cmake",
|
||||
"coffee": "coffee",
|
||||
"cpp": "c_cpp",
|
||||
"cs": "csharp",
|
||||
"css": "css",
|
||||
"d": "d",
|
||||
"dart": "dart",
|
||||
"diff": "diff",
|
||||
"dockerfile": "dockerfile",
|
||||
"elm": "elm",
|
||||
"erl": "erlang",
|
||||
"for": "fortran",
|
||||
"go": "go",
|
||||
"h": "cpp",
|
||||
"htm": "html",
|
||||
"html": "html",
|
||||
"ini": "ini",
|
||||
"java": "java",
|
||||
"js": "javascript",
|
||||
"json": "json",
|
||||
"jsp": "jsp",
|
||||
"kt": "kotlin",
|
||||
"less": "less",
|
||||
"lisp": "lisp",
|
||||
"lua": "lua",
|
||||
"m": "objectivec",
|
||||
"nginx": "nginx",
|
||||
"ocaml": "ocaml",
|
||||
"php": "php",
|
||||
"pl": "perl",
|
||||
"proto": "protobuf",
|
||||
"ps": "powershell",
|
||||
"py": "python",
|
||||
"rb": "ruby",
|
||||
"rs": "rust",
|
||||
"scala": "scala",
|
||||
"scm": "scheme",
|
||||
"scpt": "applescript",
|
||||
"scss": "scss",
|
||||
"sh": "sh",
|
||||
"sql": "sql",
|
||||
"tcl": "tcl",
|
||||
"tex": "latex",
|
||||
"toml": "ini",
|
||||
"ts": "typescript",
|
||||
"txt": "text",
|
||||
"xml": "xml",
|
||||
"yaml": "yaml",
|
||||
"yml": "yaml",
|
||||
var mimeToExtension = map[string]string{
|
||||
"application/andrew-inset": "ez",
|
||||
"application/applixware": "aw",
|
||||
"application/atom+xml": "atom",
|
||||
"application/atomcat+xml": "atomcat",
|
||||
"application/atomsvc+xml": "atomsvc",
|
||||
"application/ccxml+xml": "ccxml",
|
||||
"application/cdmi-capability": "cdmia",
|
||||
"application/cdmi-container": "cdmic",
|
||||
"application/cdmi-domain": "cdmid",
|
||||
"application/cdmi-object": "cdmio",
|
||||
"application/cdmi-queue": "cdmiq",
|
||||
"application/cu-seeme": "cu",
|
||||
"application/davmount+xml": "davmount",
|
||||
"application/docbook+xml": "dbk",
|
||||
"application/dssc+der": "dssc",
|
||||
"application/dssc+xml": "xdssc",
|
||||
"application/ecmascript": "ecma",
|
||||
"application/emma+xml": "emma",
|
||||
"application/epub+zip": "epub",
|
||||
"application/exi": "exi",
|
||||
"application/font-tdpfr": "pfr",
|
||||
"application/gml+xml": "gml",
|
||||
"application/gpx+xml": "gpx",
|
||||
"application/gxf": "gxf",
|
||||
"application/hyperstudio": "stk",
|
||||
"application/inkml+xml": "ink",
|
||||
"application/ipfix": "ipfix",
|
||||
"application/java-archive": "jar",
|
||||
"application/java-serialized-object": "ser",
|
||||
"application/java-vm": "class",
|
||||
"application/javascript": "js",
|
||||
"application/json": "json",
|
||||
"application/jsonml+json": "jsonml",
|
||||
"application/lost+xml": "lostxml",
|
||||
"application/mac-binhex40": "hqx",
|
||||
"application/mac-compactpro": "cpt",
|
||||
"application/mads+xml": "mads",
|
||||
"application/marc": "mrc",
|
||||
"application/marcxml+xml": "mrcx",
|
||||
"application/mathematica": "ma",
|
||||
"application/mathml+xml": "mathml",
|
||||
"application/mbox": "mbox",
|
||||
"application/mediaservercontrol+xml": "mscml",
|
||||
"application/metalink+xml": "metalink",
|
||||
"application/metalink4+xml": "meta4",
|
||||
"application/mets+xml": "mets",
|
||||
"application/mods+xml": "mods",
|
||||
"application/mp21": "mp21",
|
||||
"application/mp4": "mp4s",
|
||||
"application/msword": "doc",
|
||||
"application/mxf": "mxf",
|
||||
"application/octet-stream": "bin",
|
||||
"application/oda": "oda",
|
||||
"application/oebps-package+xml": "opf",
|
||||
"application/ogg": "ogx",
|
||||
"application/omdoc+xml": "omdoc",
|
||||
"application/onenote": "onetoc",
|
||||
"application/oxps": "oxps",
|
||||
"application/patch-ops-error+xml": "xer",
|
||||
"application/pdf": "pdf",
|
||||
"application/pgp-encrypted": "pgp",
|
||||
"application/pgp-signature": "asc",
|
||||
"application/pics-rules": "prf",
|
||||
"application/pkcs10": "p10",
|
||||
"application/pkcs7-mime": "p7m",
|
||||
"application/pkcs7-signature": "p7s",
|
||||
"application/pkcs8": "p8",
|
||||
"application/pkix-attr-cert": "ac",
|
||||
"application/pkix-cert": "cer",
|
||||
"application/pkix-crl": "crl",
|
||||
"application/pkix-pkipath": "pkipath",
|
||||
"application/pkixcmp": "pki",
|
||||
"application/pls+xml": "pls",
|
||||
"application/postscript": "ps",
|
||||
"application/prs.cww": "cww",
|
||||
"application/pskc+xml": "pskcxml",
|
||||
"application/rdf+xml": "rdf",
|
||||
"application/reginfo+xml": "rif",
|
||||
"application/relax-ng-compact-syntax": "rnc",
|
||||
"application/resource-lists+xml": "rl",
|
||||
"application/resource-lists-diff+xml": "rld",
|
||||
"application/rls-services+xml": "rs",
|
||||
"application/rpki-ghostbusters": "gbr",
|
||||
"application/rpki-manifest": "mft",
|
||||
"application/rpki-roa": "roa",
|
||||
"application/rsd+xml": "rsd",
|
||||
"application/rss+xml": "rss",
|
||||
"application/rtf": "rtf",
|
||||
"application/sbml+xml": "sbml",
|
||||
"application/scvp-cv-request": "scq",
|
||||
"application/scvp-cv-response": "scs",
|
||||
"application/scvp-vp-request": "spq",
|
||||
"application/scvp-vp-response": "spp",
|
||||
"application/sdp": "sdp",
|
||||
"application/set-payment-initiation": "setpay",
|
||||
"application/set-registration-initiation": "setreg",
|
||||
"application/shf+xml": "shf",
|
||||
"application/smil+xml": "smil",
|
||||
"application/sparql-query": "rq",
|
||||
"application/sparql-results+xml": "srx",
|
||||
"application/srgs": "gram",
|
||||
"application/srgs+xml": "grxml",
|
||||
"application/sru+xml": "sru",
|
||||
"application/ssdl+xml": "ssdl",
|
||||
"application/ssml+xml": "ssml",
|
||||
"application/tei+xml": "tei",
|
||||
"application/thraud+xml": "tfi",
|
||||
"application/timestamped-data": "tsd",
|
||||
"application/vnd.3gpp.pic-bw-large": "plb",
|
||||
"application/vnd.3gpp.pic-bw-small": "psb",
|
||||
"application/vnd.3gpp.pic-bw-var": "pvb",
|
||||
"application/vnd.3gpp2.tcap": "tcap",
|
||||
"application/vnd.3m.post-it-notes": "pwn",
|
||||
"application/vnd.accpac.simply.aso": "aso",
|
||||
"application/vnd.accpac.simply.imp": "imp",
|
||||
"application/vnd.acucobol": "acu",
|
||||
"application/vnd.acucorp": "atc",
|
||||
"application/vnd.adobe.air-application-installer-package+zip": "air",
|
||||
"application/vnd.adobe.formscentral.fcdt": "fcdt",
|
||||
"application/vnd.adobe.fxp": "fxp",
|
||||
"application/vnd.adobe.xdp+xml": "xdp",
|
||||
"application/vnd.adobe.xfdf": "xfdf",
|
||||
"application/vnd.ahead.space": "ahead",
|
||||
"application/vnd.airzip.filesecure.azf": "azf",
|
||||
"application/vnd.airzip.filesecure.azs": "azs",
|
||||
"application/vnd.amazon.ebook": "azw",
|
||||
"application/vnd.americandynamics.acc": "acc",
|
||||
"application/vnd.amiga.ami": "ami",
|
||||
"application/vnd.android.package-archive": "apk",
|
||||
"application/vnd.anser-web-certificate-issue-initiation": "cii",
|
||||
"application/vnd.anser-web-funds-transfer-initiation": "fti",
|
||||
"application/vnd.antix.game-component": "atx",
|
||||
"application/vnd.apple.installer+xml": "mpkg",
|
||||
"application/vnd.apple.mpegurl": "m3u8",
|
||||
"application/vnd.aristanetworks.swi": "swi",
|
||||
"application/vnd.astraea-software.iota": "iota",
|
||||
"application/vnd.audiograph": "aep",
|
||||
"application/vnd.blueice.multipass": "mpm",
|
||||
"application/vnd.bmi": "bmi",
|
||||
"application/vnd.businessobjects": "rep",
|
||||
"application/vnd.chemdraw+xml": "cdxml",
|
||||
"application/vnd.chipnuts.karaoke-mmd": "mmd",
|
||||
"application/vnd.cinderella": "cdy",
|
||||
"application/vnd.claymore": "cla",
|
||||
"application/vnd.cloanto.rp9": "rp9",
|
||||
"application/vnd.clonk.c4group": "c4g",
|
||||
"application/vnd.cluetrust.cartomobile-config": "c11amc",
|
||||
"application/vnd.cluetrust.cartomobile-config-pkg": "c11amz",
|
||||
"application/vnd.commonspace": "csp",
|
||||
"application/vnd.contact.cmsg": "cdbcmsg",
|
||||
"application/vnd.cosmocaller": "cmc",
|
||||
"application/vnd.crick.clicker": "clkx",
|
||||
"application/vnd.crick.clicker.keyboard": "clkk",
|
||||
"application/vnd.crick.clicker.palette": "clkp",
|
||||
"application/vnd.crick.clicker.template": "clkt",
|
||||
"application/vnd.crick.clicker.wordbank": "clkw",
|
||||
"application/vnd.criticaltools.wbs+xml": "wbs",
|
||||
"application/vnd.ctc-posml": "pml",
|
||||
"application/vnd.cups-ppd": "ppd",
|
||||
"application/vnd.curl.car": "car",
|
||||
"application/vnd.curl.pcurl": "pcurl",
|
||||
"application/vnd.dart": "dart",
|
||||
"application/vnd.data-vision.rdz": "rdz",
|
||||
"application/vnd.dece.data": "uvf",
|
||||
"application/vnd.dece.ttml+xml": "uvt",
|
||||
"application/vnd.dece.unspecified": "uvx",
|
||||
"application/vnd.dece.zip": "uvz",
|
||||
"application/vnd.denovo.fcselayout-link": "fe_launch",
|
||||
"application/vnd.dna": "dna",
|
||||
"application/vnd.dolby.mlp": "mlp",
|
||||
"application/vnd.dpgraph": "dpg",
|
||||
"application/vnd.dreamfactory": "dfac",
|
||||
"application/vnd.ds-keypoint": "kpxx",
|
||||
"application/vnd.dvb.ait": "ait",
|
||||
"application/vnd.dvb.service": "svc",
|
||||
"application/vnd.dynageo": "geo",
|
||||
"application/vnd.ecowin.chart": "mag",
|
||||
"application/vnd.enliven": "nml",
|
||||
"application/vnd.epson.esf": "esf",
|
||||
"application/vnd.epson.msf": "msf",
|
||||
"application/vnd.epson.quickanime": "qam",
|
||||
"application/vnd.epson.salt": "slt",
|
||||
"application/vnd.epson.ssf": "ssf",
|
||||
"application/vnd.eszigno3+xml": "es3",
|
||||
"application/vnd.ezpix-album": "ez2",
|
||||
"application/vnd.ezpix-package": "ez3",
|
||||
"application/vnd.fdf": "fdf",
|
||||
"application/vnd.fdsn.mseed": "mseed",
|
||||
"application/vnd.fdsn.seed": "seed",
|
||||
"application/vnd.flographit": "gph",
|
||||
"application/vnd.fluxtime.clip": "ftc",
|
||||
"application/vnd.framemaker": "fm",
|
||||
"application/vnd.frogans.fnc": "fnc",
|
||||
"application/vnd.frogans.ltf": "ltf",
|
||||
"application/vnd.fsc.weblaunch": "fsc",
|
||||
"application/vnd.fujitsu.oasys": "oas",
|
||||
"application/vnd.fujitsu.oasys2": "oa2",
|
||||
"application/vnd.fujitsu.oasys3": "oa3",
|
||||
"application/vnd.fujitsu.oasysgp": "fg5",
|
||||
"application/vnd.fujitsu.oasysprs": "bh2",
|
||||
"application/vnd.fujixerox.ddd": "ddd",
|
||||
"application/vnd.fujixerox.docuworks": "xdw",
|
||||
"application/vnd.fujixerox.docuworks.binder": "xbd",
|
||||
"application/vnd.fuzzysheet": "fzs",
|
||||
"application/vnd.genomatix.tuxedo": "txd",
|
||||
"application/vnd.geogebra.file": "ggb",
|
||||
"application/vnd.geogebra.tool": "ggt",
|
||||
"application/vnd.geometry-explorer": "gex",
|
||||
"application/vnd.geonext": "gxt",
|
||||
"application/vnd.geoplan": "g2w",
|
||||
"application/vnd.geospace": "g3w",
|
||||
"application/vnd.gmx": "gmx",
|
||||
"application/vnd.google-earth.kml+xml": "kml",
|
||||
"application/vnd.google-earth.kmz": "kmz",
|
||||
"application/vnd.grafeq": "gqf",
|
||||
"application/vnd.groove-account": "gac",
|
||||
"application/vnd.groove-help": "ghf",
|
||||
"application/vnd.groove-identity-message": "gim",
|
||||
"application/vnd.groove-injector": "grv",
|
||||
"application/vnd.groove-tool-message": "gtm",
|
||||
"application/vnd.groove-tool-template": "tpl",
|
||||
"application/vnd.groove-vcard": "vcg",
|
||||
"application/vnd.hal+xml": "hal",
|
||||
"application/vnd.handheld-entertainment+xml": "zmm",
|
||||
"application/vnd.hbci": "hbci",
|
||||
"application/vnd.hhe.lesson-player": "les",
|
||||
"application/vnd.hp-hpgl": "hpgl",
|
||||
"application/vnd.hp-hpid": "hpid",
|
||||
"application/vnd.hp-hps": "hps",
|
||||
"application/vnd.hp-jlyt": "jlt",
|
||||
"application/vnd.hp-pcl": "pcl",
|
||||
"application/vnd.hp-pclxl": "pclxl",
|
||||
"application/vnd.hydrostatix.sof-data": "sfd-hdstx",
|
||||
"application/vnd.ibm.minipay": "mpy",
|
||||
"application/vnd.ibm.modcap": "afp",
|
||||
"application/vnd.ibm.rights-management": "irm",
|
||||
"application/vnd.ibm.secure-container": "sc",
|
||||
"application/vnd.iccprofile": "icc",
|
||||
"application/vnd.igloader": "igl",
|
||||
"application/vnd.immervision-ivp": "ivp",
|
||||
"application/vnd.immervision-ivu": "ivu",
|
||||
"application/vnd.insors.igm": "igm",
|
||||
"application/vnd.intercon.formnet": "xpw",
|
||||
"application/vnd.intergeo": "i2g",
|
||||
"application/vnd.intu.qbo": "qbo",
|
||||
"application/vnd.intu.qfx": "qfx",
|
||||
"application/vnd.ipunplugged.rcprofile": "rcprofile",
|
||||
"application/vnd.irepository.package+xml": "irp",
|
||||
"application/vnd.is-xpr": "xpr",
|
||||
"application/vnd.isac.fcs": "fcs",
|
||||
"application/vnd.jam": "jam",
|
||||
"application/vnd.jcp.javame.midlet-rms": "rms",
|
||||
"application/vnd.jisp": "jisp",
|
||||
"application/vnd.joost.joda-archive": "joda",
|
||||
"application/vnd.kahootz": "ktz",
|
||||
"application/vnd.kde.karbon": "karbon",
|
||||
"application/vnd.kde.kchart": "chrt",
|
||||
"application/vnd.kde.kformula": "kfo",
|
||||
"application/vnd.kde.kivio": "flw",
|
||||
"application/vnd.kde.kontour": "kon",
|
||||
"application/vnd.kde.kpresenter": "kpr",
|
||||
"application/vnd.kde.kspread": "ksp",
|
||||
"application/vnd.kde.kword": "kwd",
|
||||
"application/vnd.kenameaapp": "htke",
|
||||
"application/vnd.kidspiration": "kia",
|
||||
"application/vnd.kinar": "kne",
|
||||
"application/vnd.koan": "skp",
|
||||
"application/vnd.kodak-descriptor": "sse",
|
||||
"application/vnd.las.las+xml": "lasxml",
|
||||
"application/vnd.llamagraphics.life-balance.desktop": "lbd",
|
||||
"application/vnd.llamagraphics.life-balance.exchange+xml": "lbe",
|
||||
"application/vnd.lotus-1-2-3": "123",
|
||||
"application/vnd.lotus-approach": "apr",
|
||||
"application/vnd.lotus-freelance": "pre",
|
||||
"application/vnd.lotus-notes": "nsf",
|
||||
"application/vnd.lotus-organizer": "org",
|
||||
"application/vnd.lotus-screencam": "scm",
|
||||
"application/vnd.lotus-wordpro": "lwp",
|
||||
"application/vnd.macports.portpkg": "portpkg",
|
||||
"application/vnd.mcd": "mcd",
|
||||
"application/vnd.medcalcdata": "mc1",
|
||||
"application/vnd.mediastation.cdkey": "cdkey",
|
||||
"application/vnd.mfer": "mwf",
|
||||
"application/vnd.mfmp": "mfm",
|
||||
"application/vnd.micrografx.flo": "flo",
|
||||
"application/vnd.micrografx.igx": "igx",
|
||||
"application/vnd.mif": "mif",
|
||||
"application/vnd.mobius.daf": "daf",
|
||||
"application/vnd.mobius.dis": "dis",
|
||||
"application/vnd.mobius.mbk": "mbk",
|
||||
"application/vnd.mobius.mqy": "mqy",
|
||||
"application/vnd.mobius.msl": "msl",
|
||||
"application/vnd.mobius.plc": "plc",
|
||||
"application/vnd.mobius.txf": "txf",
|
||||
"application/vnd.mophun.application": "mpn",
|
||||
"application/vnd.mophun.certificate": "mpc",
|
||||
"application/vnd.mozilla.xul+xml": "xul",
|
||||
"application/vnd.ms-artgalry": "cil",
|
||||
"application/vnd.ms-cab-compressed": "cab",
|
||||
"application/vnd.ms-excel": "xls",
|
||||
"application/vnd.ms-excel.addin.macroenabled.12": "xlam",
|
||||
"application/vnd.ms-excel.sheet.binary.macroenabled.12": "xlsb",
|
||||
"application/vnd.ms-excel.sheet.macroenabled.12": "xlsm",
|
||||
"application/vnd.ms-excel.template.macroenabled.12": "xltm",
|
||||
"application/vnd.ms-fontobject": "eot",
|
||||
"application/vnd.ms-htmlhelp": "chm",
|
||||
"application/vnd.ms-ims": "ims",
|
||||
"application/vnd.ms-lrm": "lrm",
|
||||
"application/vnd.ms-officetheme": "thmx",
|
||||
"application/vnd.ms-pki.seccat": "cat",
|
||||
"application/vnd.ms-pki.stl": "stl",
|
||||
"application/vnd.ms-powerpoint": "ppt",
|
||||
"application/vnd.ms-powerpoint.addin.macroenabled.12": "ppam",
|
||||
"application/vnd.ms-powerpoint.presentation.macroenabled.12": "pptm",
|
||||
"application/vnd.ms-powerpoint.slide.macroenabled.12": "sldm",
|
||||
"application/vnd.ms-powerpoint.slideshow.macroenabled.12": "ppsm",
|
||||
"application/vnd.ms-powerpoint.template.macroenabled.12": "potm",
|
||||
"application/vnd.ms-project": "mpp",
|
||||
"application/vnd.ms-word.document.macroenabled.12": "docm",
|
||||
"application/vnd.ms-word.template.macroenabled.12": "dotm",
|
||||
"application/vnd.ms-works": "wps",
|
||||
"application/vnd.ms-wpl": "wpl",
|
||||
"application/vnd.ms-xpsdocument": "xps",
|
||||
"application/vnd.mseq": "mseq",
|
||||
"application/vnd.musician": "mus",
|
||||
"application/vnd.muvee.style": "msty",
|
||||
"application/vnd.mynfc": "taglet",
|
||||
"application/vnd.neurolanguage.nlu": "nlu",
|
||||
"application/vnd.nitf": "ntf",
|
||||
"application/vnd.noblenet-directory": "nnd",
|
||||
"application/vnd.noblenet-sealer": "nns",
|
||||
"application/vnd.noblenet-web": "nnw",
|
||||
"application/vnd.nokia.n-gage.data": "ngdat",
|
||||
"application/vnd.nokia.n-gage.symbian.install": "n-gage",
|
||||
"application/vnd.nokia.radio-preset": "rpst",
|
||||
"application/vnd.nokia.radio-presets": "rpss",
|
||||
"application/vnd.novadigm.edm": "edm",
|
||||
"application/vnd.novadigm.edx": "edx",
|
||||
"application/vnd.novadigm.ext": "ext",
|
||||
"application/vnd.oasis.opendocument.chart": "odc",
|
||||
"application/vnd.oasis.opendocument.chart-template": "otc",
|
||||
"application/vnd.oasis.opendocument.database": "odb",
|
||||
"application/vnd.oasis.opendocument.formula": "odf",
|
||||
"application/vnd.oasis.opendocument.formula-template": "odft",
|
||||
"application/vnd.oasis.opendocument.graphics": "odg",
|
||||
"application/vnd.oasis.opendocument.graphics-template": "otg",
|
||||
"application/vnd.oasis.opendocument.image": "odi",
|
||||
"application/vnd.oasis.opendocument.image-template": "oti",
|
||||
"application/vnd.oasis.opendocument.presentation": "odp",
|
||||
"application/vnd.oasis.opendocument.presentation-template": "otp",
|
||||
"application/vnd.oasis.opendocument.spreadsheet": "ods",
|
||||
"application/vnd.oasis.opendocument.spreadsheet-template": "ots",
|
||||
"application/vnd.oasis.opendocument.text": "odt",
|
||||
"application/vnd.oasis.opendocument.text-master": "odm",
|
||||
"application/vnd.oasis.opendocument.text-template": "ott",
|
||||
"application/vnd.oasis.opendocument.text-web": "oth",
|
||||
"application/vnd.olpc-sugar": "xo",
|
||||
"application/vnd.oma.dd2+xml": "dd2",
|
||||
"application/vnd.openofficeorg.extension": "oxt",
|
||||
"application/vnd.openxmlformats-officedocument.presentationml.presentation": "pptx",
|
||||
"application/vnd.openxmlformats-officedocument.presentationml.slide": "sldx",
|
||||
"application/vnd.openxmlformats-officedocument.presentationml.slideshow": "ppsx",
|
||||
"application/vnd.openxmlformats-officedocument.presentationml.template": "potx",
|
||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet": "xlsx",
|
||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.template": "xltx",
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document": "docx",
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.template": "dotx",
|
||||
"application/vnd.osgeo.mapguide.package": "mgp",
|
||||
"application/vnd.osgi.dp": "dp",
|
||||
"application/vnd.osgi.subsystem": "esa",
|
||||
"application/vnd.palm": "pdb",
|
||||
"application/vnd.pawaafile": "paw",
|
||||
"application/vnd.pg.format": "str",
|
||||
"application/vnd.pg.osasli": "ei6",
|
||||
"application/vnd.picsel": "efif",
|
||||
"application/vnd.pmi.widget": "wg",
|
||||
"application/vnd.pocketlearn": "plf",
|
||||
"application/vnd.powerbuilder6": "pbd",
|
||||
"application/vnd.previewsystems.box": "box",
|
||||
"application/vnd.proteus.magazine": "mgz",
|
||||
"application/vnd.publishare-delta-tree": "qps",
|
||||
"application/vnd.pvi.ptid1": "ptid",
|
||||
"application/vnd.quark.quarkxpress": "qxd",
|
||||
"application/vnd.realvnc.bed": "bed",
|
||||
"application/vnd.recordare.musicxml": "mxl",
|
||||
"application/vnd.recordare.musicxml+xml": "musicxml",
|
||||
"application/vnd.rig.cryptonote": "cryptonote",
|
||||
"application/vnd.rim.cod": "cod",
|
||||
"application/vnd.rn-realmedia": "rm",
|
||||
"application/vnd.rn-realmedia-vbr": "rmvb",
|
||||
"application/vnd.route66.link66+xml": "link66",
|
||||
"application/vnd.sailingtracker.track": "st",
|
||||
"application/vnd.seemail": "see",
|
||||
"application/vnd.sema": "sema",
|
||||
"application/vnd.semd": "semd",
|
||||
"application/vnd.semf": "semf",
|
||||
"application/vnd.shana.informed.formdata": "ifm",
|
||||
"application/vnd.shana.informed.formtemplate": "itp",
|
||||
"application/vnd.shana.informed.interchange": "iif",
|
||||
"application/vnd.shana.informed.package": "ipk",
|
||||
"application/vnd.simtech-mindmapper": "twd",
|
||||
"application/vnd.smaf": "mmf",
|
||||
"application/vnd.smart.teacher": "teacher",
|
||||
"application/vnd.solent.sdkm+xml": "sdkm",
|
||||
"application/vnd.spotfire.dxp": "dxp",
|
||||
"application/vnd.spotfire.sfs": "sfs",
|
||||
"application/vnd.stardivision.calc": "sdc",
|
||||
"application/vnd.stardivision.draw": "sda",
|
||||
"application/vnd.stardivision.impress": "sdd",
|
||||
"application/vnd.stardivision.math": "smf",
|
||||
"application/vnd.stardivision.writer": "sdw",
|
||||
"application/vnd.stardivision.writer-global": "sgl",
|
||||
"application/vnd.stepmania.package": "smzip",
|
||||
"application/vnd.stepmania.stepchart": "sm",
|
||||
"application/vnd.sun.xml.calc": "sxc",
|
||||
"application/vnd.sun.xml.calc.template": "stc",
|
||||
"application/vnd.sun.xml.draw": "sxd",
|
||||
"application/vnd.sun.xml.draw.template": "std",
|
||||
"application/vnd.sun.xml.impress": "sxi",
|
||||
"application/vnd.sun.xml.impress.template": "sti",
|
||||
"application/vnd.sun.xml.math": "sxm",
|
||||
"application/vnd.sun.xml.writer": "sxw",
|
||||
"application/vnd.sun.xml.writer.global": "sxg",
|
||||
"application/vnd.sun.xml.writer.template": "stw",
|
||||
"application/vnd.sus-calendar": "sus",
|
||||
"application/vnd.svd": "svd",
|
||||
"application/vnd.symbian.install": "sis",
|
||||
"application/vnd.syncml+xml": "xsm",
|
||||
"application/vnd.syncml.dm+wbxml": "bdm",
|
||||
"application/vnd.syncml.dm+xml": "xdm",
|
||||
"application/vnd.tao.intent-module-archive": "tao",
|
||||
"application/vnd.tcpdump.pcap": "pcap",
|
||||
"application/vnd.tmobile-livetv": "tmo",
|
||||
"application/vnd.trid.tpt": "tpt",
|
||||
"application/vnd.triscape.mxs": "mxs",
|
||||
"application/vnd.trueapp": "tra",
|
||||
"application/vnd.ufdl": "ufdl",
|
||||
"application/vnd.uiq.theme": "utz",
|
||||
"application/vnd.umajin": "umj",
|
||||
"application/vnd.unity": "unityweb",
|
||||
"application/vnd.uoml+xml": "uoml",
|
||||
"application/vnd.vcx": "vcx",
|
||||
"application/vnd.visio": "vsd",
|
||||
"application/vnd.visionary": "vis",
|
||||
"application/vnd.vsf": "vsf",
|
||||
"application/vnd.wap.wbxml": "wbxml",
|
||||
"application/vnd.wap.wmlc": "wmlc",
|
||||
"application/vnd.wap.wmlscriptc": "wmlsc",
|
||||
"application/vnd.webturbo": "wtb",
|
||||
"application/vnd.wolfram.player": "nbp",
|
||||
"application/vnd.wordperfect": "wpd",
|
||||
"application/vnd.wqd": "wqd",
|
||||
"application/vnd.wt.stf": "stf",
|
||||
"application/vnd.xara": "xar",
|
||||
"application/vnd.xfdl": "xfdl",
|
||||
"application/vnd.yamaha.hv-dic": "hvd",
|
||||
"application/vnd.yamaha.hv-script": "hvs",
|
||||
"application/vnd.yamaha.hv-voice": "hvp",
|
||||
"application/vnd.yamaha.openscoreformat": "osf",
|
||||
"application/vnd.yamaha.openscoreformat.osfpvg+xml": "osfpvg",
|
||||
"application/vnd.yamaha.smaf-audio": "saf",
|
||||
"application/vnd.yamaha.smaf-phrase": "spf",
|
||||
"application/vnd.yellowriver-custom-menu": "cmp",
|
||||
"application/vnd.zul": "zir",
|
||||
"application/vnd.zzazz.deck+xml": "zaz",
|
||||
"application/voicexml+xml": "vxml",
|
||||
"application/widget": "wgt",
|
||||
"application/winhlp": "hlp",
|
||||
"application/wsdl+xml": "wsdl",
|
||||
"application/wspolicy+xml": "wspolicy",
|
||||
"application/x-7z-compressed": "7z",
|
||||
"application/x-abiword": "abw",
|
||||
"application/x-ace-compressed": "ace",
|
||||
"application/x-apple-diskimage": "dmg",
|
||||
"application/x-authorware-bin": "aab",
|
||||
"application/x-authorware-map": "aam",
|
||||
"application/x-authorware-seg": "aas",
|
||||
"application/x-bcpio": "bcpio",
|
||||
"application/x-bittorrent": "torrent",
|
||||
"application/x-blorb": "blb",
|
||||
"application/x-bzip": "bz",
|
||||
"application/x-bzip2": "bz2",
|
||||
"application/x-cbr": "cbr",
|
||||
"application/x-cdlink": "vcd",
|
||||
"application/x-cfs-compressed": "cfs",
|
||||
"application/x-chat": "chat",
|
||||
"application/x-chess-pgn": "pgn",
|
||||
"application/x-conference": "nsc",
|
||||
"application/x-cpio": "cpio",
|
||||
"application/x-csh": "csh",
|
||||
"application/x-debian-package": "deb",
|
||||
"application/x-dgc-compressed": "dgc",
|
||||
"application/x-director": "dir",
|
||||
"application/x-doom": "wad",
|
||||
"application/x-dtbncx+xml": "ncx",
|
||||
"application/x-dtbook+xml": "dtb",
|
||||
"application/x-dtbresource+xml": "res",
|
||||
"application/x-dvi": "dvi",
|
||||
"application/x-envoy": "evy",
|
||||
"application/x-eva": "eva",
|
||||
"application/x-font-bdf": "bdf",
|
||||
"application/x-font-ghostscript": "gsf",
|
||||
"application/x-font-linux-psf": "psf",
|
||||
"application/x-font-otf": "otf",
|
||||
"application/x-font-pcf": "pcf",
|
||||
"application/x-font-snf": "snf",
|
||||
"application/x-font-ttf": "ttf",
|
||||
"application/x-font-type1": "pfa",
|
||||
"application/x-font-woff": "woff",
|
||||
"application/x-freearc": "arc",
|
||||
"application/x-futuresplash": "spl",
|
||||
"application/x-gca-compressed": "gca",
|
||||
"application/x-glulx": "ulx",
|
||||
"application/x-gnumeric": "gnumeric",
|
||||
"application/x-gramps-xml": "gramps",
|
||||
"application/x-gtar": "gtar",
|
||||
"application/x-hdf": "hdf",
|
||||
"application/x-install-instructions": "install",
|
||||
"application/x-iso9660-image": "iso",
|
||||
"application/x-java-jnlp-file": "jnlp",
|
||||
"application/x-latex": "latex",
|
||||
"application/x-lzh-compressed": "lzh",
|
||||
"application/x-mie": "mie",
|
||||
"application/x-mobipocket-ebook": "prc",
|
||||
"application/x-ms-application": "application",
|
||||
"application/x-ms-shortcut": "lnk",
|
||||
"application/x-ms-wmd": "wmd",
|
||||
"application/x-ms-wmz": "wmz",
|
||||
"application/x-ms-xbap": "xbap",
|
||||
"application/x-msaccess": "mdb",
|
||||
"application/x-msbinder": "obd",
|
||||
"application/x-mscardfile": "crd",
|
||||
"application/x-msclip": "clp",
|
||||
"application/x-msdownload": "exe",
|
||||
"application/x-msmediaview": "mvb",
|
||||
"application/x-msmetafile": "wmf",
|
||||
"application/x-msmoney": "mny",
|
||||
"application/x-mspublisher": "pub",
|
||||
"application/x-msschedule": "scd",
|
||||
"application/x-msterminal": "trm",
|
||||
"application/x-mswrite": "wri",
|
||||
"application/x-netcdf": "nc",
|
||||
"application/x-nzb": "nzb",
|
||||
"application/x-pkcs12": "p12",
|
||||
"application/x-pkcs7-certificates": "p7b",
|
||||
"application/x-pkcs7-certreqresp": "p7r",
|
||||
"application/x-rar-compressed": "rar",
|
||||
"application/x-research-info-systems": "ris",
|
||||
"application/x-sh": "sh",
|
||||
"application/x-shar": "shar",
|
||||
"application/x-shockwave-flash": "swf",
|
||||
"application/x-silverlight-app": "xap",
|
||||
"application/x-sql": "sql",
|
||||
"application/x-stuffit": "sit",
|
||||
"application/x-stuffitx": "sitx",
|
||||
"application/x-subrip": "srt",
|
||||
"application/x-sv4cpio": "sv4cpio",
|
||||
"application/x-sv4crc": "sv4crc",
|
||||
"application/x-t3vm-image": "t3",
|
||||
"application/x-tads": "gam",
|
||||
"application/x-tar": "tar",
|
||||
"application/x-tcl": "tcl",
|
||||
"application/x-tex": "tex",
|
||||
"application/x-tex-tfm": "tfm",
|
||||
"application/x-texinfo": "texinfo",
|
||||
"application/x-tgif": "obj",
|
||||
"application/x-ustar": "ustar",
|
||||
"application/x-wais-source": "src",
|
||||
"application/x-x509-ca-cert": "der",
|
||||
"application/x-xfig": "fig",
|
||||
"application/x-xliff+xml": "xlf",
|
||||
"application/x-xpinstall": "xpi",
|
||||
"application/x-xz": "xz",
|
||||
"application/x-zmachine": "z1",
|
||||
"application/xaml+xml": "xaml",
|
||||
"application/xcap-diff+xml": "xdf",
|
||||
"application/xenc+xml": "xenc",
|
||||
"application/xhtml+xml": "xhtml",
|
||||
"application/xml": "xml",
|
||||
"application/xml-dtd": "dtd",
|
||||
"application/xop+xml": "xop",
|
||||
"application/xproc+xml": "xpl",
|
||||
"application/xslt+xml": "xslt",
|
||||
"application/xspf+xml": "xspf",
|
||||
"application/xv+xml": "mxml",
|
||||
"application/yang": "yang",
|
||||
"application/yin+xml": "yin",
|
||||
"application/zip": "zip",
|
||||
"audio/adpcm": "adp",
|
||||
"audio/basic": "au",
|
||||
"audio/midi": "mid",
|
||||
"audio/mp4": "mp4a",
|
||||
"audio/mpeg": "mpga",
|
||||
"audio/ogg": "oga",
|
||||
"audio/s3m": "s3m",
|
||||
"audio/silk": "sil",
|
||||
"audio/vnd.dece.audio": "uva",
|
||||
"audio/vnd.digital-winds": "eol",
|
||||
"audio/vnd.dra": "dra",
|
||||
"audio/vnd.dts": "dts",
|
||||
"audio/vnd.dts.hd": "dtshd",
|
||||
"audio/vnd.lucent.voice": "lvp",
|
||||
"audio/vnd.ms-playready.media.pya": "pya",
|
||||
"audio/vnd.nuera.ecelp4800": "ecelp4800",
|
||||
"audio/vnd.nuera.ecelp7470": "ecelp7470",
|
||||
"audio/vnd.nuera.ecelp9600": "ecelp9600",
|
||||
"audio/vnd.rip": "rip",
|
||||
"audio/webm": "weba",
|
||||
"audio/x-aac": "aac",
|
||||
"audio/x-aiff": "aif",
|
||||
"audio/x-caf": "caf",
|
||||
"audio/x-flac": "flac",
|
||||
"audio/x-matroska": "mka",
|
||||
"audio/x-mpegurl": "m3u",
|
||||
"audio/x-ms-wax": "wax",
|
||||
"audio/x-ms-wma": "wma",
|
||||
"audio/x-pn-realaudio": "ram",
|
||||
"audio/x-pn-realaudio-plugin": "rmp",
|
||||
"audio/x-wav": "wav",
|
||||
"audio/xm": "xm",
|
||||
"chemical/x-cdx": "cdx",
|
||||
"chemical/x-cif": "cif",
|
||||
"chemical/x-cmdf": "cmdf",
|
||||
"chemical/x-cml": "cml",
|
||||
"chemical/x-csml": "csml",
|
||||
"chemical/x-xyz": "xyz",
|
||||
"image/bmp": "bmp",
|
||||
"image/cgm": "cgm",
|
||||
"image/g3fax": "g3",
|
||||
"image/gif": "gif",
|
||||
"image/ief": "ief",
|
||||
"image/jpeg": "jpg",
|
||||
"image/ktx": "ktx",
|
||||
"image/png": "png",
|
||||
"image/prs.btif": "btif",
|
||||
"image/sgi": "sgi",
|
||||
"image/svg+xml": "svg",
|
||||
"image/tiff": "tiff",
|
||||
"image/vnd.adobe.photoshop": "psd",
|
||||
"image/vnd.dece.graphic": "uvi",
|
||||
"image/vnd.dvb.subtitle": "sub",
|
||||
"image/vnd.djvu": "djvu",
|
||||
"image/vnd.dwg": "dwg",
|
||||
"image/vnd.dxf": "dxf",
|
||||
"image/vnd.fastbidsheet": "fbs",
|
||||
"image/vnd.fpx": "fpx",
|
||||
"image/vnd.fst": "fst",
|
||||
"image/vnd.fujixerox.edmics-mmr": "mmr",
|
||||
"image/vnd.fujixerox.edmics-rlc": "rlc",
|
||||
"image/vnd.ms-modi": "mdi",
|
||||
"image/vnd.ms-photo": "wdp",
|
||||
"image/vnd.net-fpx": "npx",
|
||||
"image/vnd.wap.wbmp": "wbmp",
|
||||
"image/vnd.xiff": "xif",
|
||||
"image/webp": "webp",
|
||||
"image/x-3ds": "3ds",
|
||||
"image/x-cmu-raster": "ras",
|
||||
"image/x-cmx": "cmx",
|
||||
"image/x-freehand": "fh",
|
||||
"image/x-icon": "ico",
|
||||
"image/x-mrsid-image": "sid",
|
||||
"image/x-pcx": "pcx",
|
||||
"image/x-pict": "pic",
|
||||
"image/x-portable-anymap": "pnm",
|
||||
"image/x-portable-bitmap": "pbm",
|
||||
"image/x-portable-graymap": "pgm",
|
||||
"image/x-portable-pixmap": "ppm",
|
||||
"image/x-rgb": "rgb",
|
||||
"image/x-tga": "tga",
|
||||
"image/x-xbitmap": "xbm",
|
||||
"image/x-xpixmap": "xpm",
|
||||
"image/x-xwindowdump": "xwd",
|
||||
"message/rfc822": "eml",
|
||||
"model/iges": "igs",
|
||||
"model/mesh": "mesh",
|
||||
"model/vnd.collada+xml": "dae",
|
||||
"model/vnd.dwf": "dwf",
|
||||
"model/vnd.gdl": "gdl",
|
||||
"model/vnd.gtw": "gtw",
|
||||
"model/vnd.mts": "mts",
|
||||
"model/vnd.vtu": "vtu",
|
||||
"model/vrml": "wrl",
|
||||
"model/x3d+binary": "x3db",
|
||||
"model/x3d+vrml": "x3dv",
|
||||
"model/x3d+xml": "x3d",
|
||||
"text/cache-manifest": "appcache",
|
||||
"text/calendar": "ics",
|
||||
"text/css": "css",
|
||||
"text/csv": "csv",
|
||||
"text/html": "html",
|
||||
"text/n3": "n3",
|
||||
"text/plain": "txt",
|
||||
"text/prs.lines.tag": "dsc",
|
||||
"text/richtext": "rtx",
|
||||
"text/sgml": "sgml",
|
||||
"text/tab-separated-values": "tsv",
|
||||
"text/troff": "t",
|
||||
"text/turtle": "ttl",
|
||||
"text/uri-list": "uri",
|
||||
"text/vcard": "vcard",
|
||||
"text/vnd.curl": "curl",
|
||||
"text/vnd.curl.dcurl": "dcurl",
|
||||
"text/vnd.curl.scurl": "scurl",
|
||||
"text/vnd.curl.mcurl": "mcurl",
|
||||
"text/vnd.dvb.subtitle": "sub",
|
||||
"text/vnd.fly": "fly",
|
||||
"text/vnd.fmi.flexstor": "flx",
|
||||
"text/vnd.graphviz": "gv",
|
||||
"text/vnd.in3d.3dml": "3dml",
|
||||
"text/vnd.in3d.spot": "spot",
|
||||
"text/vnd.sun.j2me.app-descriptor": "jad",
|
||||
"text/vnd.wap.wml": "wml",
|
||||
"text/vnd.wap.wmlscript": "wmls",
|
||||
"text/x-asm": "s",
|
||||
"text/x-c": "c",
|
||||
"text/x-fortran": "f",
|
||||
"text/x-java-source": "java",
|
||||
"text/x-opml": "opml",
|
||||
"text/x-pascal": "p",
|
||||
"text/x-nfo": "nfo",
|
||||
"text/x-shellscript": "sh",
|
||||
"text/x-setext": "etx",
|
||||
"text/x-sfv": "sfv",
|
||||
"text/x-tex": "tex",
|
||||
"text/x-uuencode": "uu",
|
||||
"text/x-vcalendar": "vcs",
|
||||
"text/x-vcard": "vcf",
|
||||
"video/3gpp": "3gp",
|
||||
"video/3gpp2": "3g2",
|
||||
"video/h261": "h261",
|
||||
"video/h263": "h263",
|
||||
"video/h264": "h264",
|
||||
"video/jpeg": "jpgv",
|
||||
"video/jpm": "jpm",
|
||||
"video/mj2": "mj2",
|
||||
"video/mp4": "mp4",
|
||||
"video/mpeg": "mpeg",
|
||||
"video/ogg": "ogv",
|
||||
"video/quicktime": "qt",
|
||||
"video/vnd.dece.hd": "uvh",
|
||||
"video/vnd.dece.mobile": "uvm",
|
||||
"video/vnd.dece.pd": "uvp",
|
||||
"video/vnd.dece.sd": "uvs",
|
||||
"video/vnd.dece.video": "uvv",
|
||||
"video/vnd.dvb.file": "dvb",
|
||||
"video/vnd.fvt": "fvt",
|
||||
"video/vnd.mpegurl": "m4u",
|
||||
"video/vnd.ms-playready.media.pyv": "pyv",
|
||||
"video/vnd.uvvu.mp4": "uvvu",
|
||||
"video/vnd.vivo": "viv",
|
||||
"video/webm": "webm",
|
||||
"video/x-f4v": "f4v",
|
||||
"video/x-fli": "fli",
|
||||
"video/x-flv": "flv",
|
||||
"video/x-m4v": "m4v",
|
||||
"video/x-matroska": "mkv",
|
||||
"video/x-mng": "mng",
|
||||
"video/x-ms-asf": "asf",
|
||||
"video/x-ms-vob": "vob",
|
||||
"video/x-ms-wm": "wm",
|
||||
"video/x-ms-wmv": "wmv",
|
||||
"video/x-ms-wmx": "wmx",
|
||||
"video/x-ms-wvx": "wvx",
|
||||
"video/x-msvideo": "avi",
|
||||
"video/x-sgi-movie": "movie",
|
||||
"video/x-smv": "smv",
|
||||
"x-conference/x-cooltalk": "ice",
|
||||
}
|
||||
|
||||
var extensionToAce = map[string]string{
	"c": "c_cpp",
	"h": "c_cpp",
	"cpp": "c_cpp",
	"clj": "clojure",
	"coffee": "coffee",
	"cfc": "coldfusion",
	"cs": "csharp",
	"sh": "sh",
	"bash": "sh",
	"css": "css",
	"go": "golang",
	"diff": "diff",
	"html": "html",
	"xml": "xml",
	"ini": "ini",
	"java": "java",
	"js": "javascript",
	"json": "json",
	"jsp": "jsp",
	"tex": "latex",
	"lisp": "lisp",
	"less": "less",
	"lua": "lua",
	"md": "markdown",
	"ocaml": "ocaml",
	"tcl": "tcl",
	"yaml": "yaml",
	"php": "php",
	"pl": "perl",
	"py": "python",
	"rb": "ruby",
	"sql": "sql",
	"apache": "apache",
	"cmake": "cmake",
	"bat": "dos",
	"scala": "scala",
	"txt": "text",
}

var extensionToHl = map[string]string{
	"c": "cpp",
	"h": "cpp",
	"cpp": "c_cpp",
	"clj": "clojure",
	"coffee": "coffee",
	"cfc": "coldfusion",
	"cs": "csharp",
	"sh": "sh",
	"bash": "sh",
	"css": "css",
	"go": "go",
	"diff": "diff",
	"html": "html",
	"htm": "html",
	"ini": "ini",
	"java": "java",
	"js": "javascript",
	"json": "json",
	"jsp": "jsp",
	"tex": "latex",
	"lisp": "lisp",
	"less": "less",
	"lua": "lua",
	"ocaml": "ocaml",
	"tcl": "tcl",
	"nginx": "nginx",
	"xml": "xml",
	"yaml": "yaml",
	"php": "php",
	"pl": "perl",
	"py": "python",
	"rb": "ruby",
	"sql": "sql",
	"apache": "apache",
	"cmake": "cmake",
	"bat": "dos",
	"scala": "scala",
	"txt": "text",
}
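These tables are plain Go lookup maps: one mapping MIME types to file extensions, and two mapping extensions to editor (Ace) and highlighter modes. As an aside, a minimal, self-contained sketch of the lookup-with-fallback pattern such tables are typically consulted with; the map subset, function name, and "text" fallback here are assumptions for illustration only, not code from this repository:

```go
package main

import "fmt"

// Illustrative stand-in for an extension-to-highlighter table (small subset).
var extToLang = map[string]string{
	"go":  "go",
	"py":  "python",
	"txt": "text",
}

// langFor returns the highlighter language for a file extension,
// falling back to plain text when the extension is not in the table.
func langFor(ext string) string {
	if lang, ok := extToLang[ext]; ok {
		return lang
	}
	return "text" // assumed fallback for unknown extensions
}

func main() {
	fmt.Println(langFor("go")) // go
	fmt.Println(langFor("rs")) // text (not in the table)
}
```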