Compare commits

59 commits

Commit SHA1s in this comparison:

9a76ff358f, 282c63153d, 926d9bf458, 2ccd06b9d7, e41270ab91, 94f63c2045, 486cc6ff77, 91b9885ac6,
ef99024433, 9a5fc11dff, 456274c1b9, a2e00d06e0, 965d5f6c29, 8ed205181a, 5eb6f32ff0, 6ce2bd6b9f,
e2a65a5b62, 151515f516, bc599ae018, 8edf53c142, 9f7e6b6eea, 3167ab7ff0, 2e0319b602, 2b21a5bb81,
49d45f52bb, 7212cd6555, 53e7aa4568, 7543c82473, a0c508c422, c61db30f9f, fb0d44f132, 9f6346a373,
a4c0e0ac37, 68a80d3698, 9a8bb1bb7f, 773eb877bd, 7ca3331044, e4468715ac, e06e9ad6fe, a4240680c8,
779fc5c425, 597bec430c, fabb8f2dd7, a86a144485, c53b850aef, 4baaa0a15f, 3947197225, 5a6e589500,
4361c44128, b63274ad01, cf0a30f46a, 1d583bcb48, 85be5b078d, 0e763cb776, bf090c5119, 50c1bdc018,
872340e0dc, 8098b7e39e, 2c0b2b2e79
50 changed files with 1461 additions and 577 deletions
.github/workflows/buildx.yaml (vendored, new file): 60 lines

@@ -0,0 +1,60 @@
name: buildx

on:
  push:
    tags:
      - 'v*'

jobs:
  buildx:
    runs-on: ubuntu-latest
    steps:
      -
        name: Checkout
        uses: actions/checkout@v2
      -
        name: Prepare
        id: prepare
        run: |
          DOCKER_IMAGE=andreimarcu/linx-server
          DOCKER_PLATFORMS=linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64/v8,linux/386
          VERSION=version-${GITHUB_REF#refs/tags/v}
          TAGS="--tag ${DOCKER_IMAGE}:${VERSION} --tag ${DOCKER_IMAGE}:latest"

          echo ::set-output name=docker_image::${DOCKER_IMAGE}
          echo ::set-output name=version::${VERSION}
          echo ::set-output name=buildx_args::--platform ${DOCKER_PLATFORMS} \
            --build-arg VERSION=${VERSION} \
            --build-arg BUILD_DATE=$(date -u +'%Y-%m-%dT%H:%M:%SZ') \
            --build-arg VCS_REF=${GITHUB_SHA::8} \
            ${TAGS} --file Dockerfile .
      -
        name: Set up Docker Buildx
        uses: crazy-max/ghaction-docker-buildx@v3
      -
        name: Docker Buildx (build)
        run: |
          docker buildx build --output "type=image,push=false" ${{ steps.prepare.outputs.buildx_args }}
      -
        name: Docker Login
        if: success()
        env:
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
        run: |
          echo "${DOCKER_PASSWORD}" | docker login --username "${DOCKER_USERNAME}" --password-stdin
      -
        name: Docker Buildx (push)
        if: success()
        run: |
          docker buildx build --output "type=image,push=true" ${{ steps.prepare.outputs.buildx_args }}
      -
        name: Docker Check Manifest
        if: always()
        run: |
          docker run --rm mplatform/mquery ${{ steps.prepare.outputs.docker_image }}:${{ steps.prepare.outputs.version }}
      -
        name: Clear
        if: always()
        run: |
          rm -f ${HOME}/.docker/config.json
.gitignore (vendored): 8 changes

@@ -20,6 +20,8 @@ _cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

.DS_Store

_testmain.go

*.exe

@@ -27,7 +29,11 @@ _testmain.go
*.prof

linx-server
linx-cleanup/linx-cleanup
linx-genkey/linx-genkey
linx-server.conf
files/
meta/
binaries/
linx-cleanup
custom_pages/
authfile
@@ -1,8 +1,7 @@
language: go

go:
  - "1.10"
  - "1.11"
  - "1.14"

before_script:
  - go vet ./...
@@ -1,4 +1,4 @@
FROM golang:alpine3.8 AS build
FROM golang:1.14-alpine3.11 AS build

COPY . /go/src/github.com/andreimarcu/linx-server
WORKDIR /go/src/github.com/andreimarcu/linx-server

@@ -8,7 +8,7 @@ RUN set -ex \
        && go get -v . \
        && apk del .build-deps

FROM alpine:3.8
FROM alpine:3.11

COPY --from=build /go/bin/linx-server /usr/local/bin/linx-server
README.md: 199 changes

@@ -1,101 +1,163 @@
# Development on this repository has been frozen.

linx-server

Feel free to send a pull request if you are maintaining an active fork of this project to add a link to your repository in this readme.

### Active Forks
- ZizzyDizzyMC: [https://github.com/ZizzyDizzyMC/linx-server/](https://github.com/ZizzyDizzyMC/linx-server/)
- Seb3thehacker: [https://github.com/Seb3thehacker/linx-server](https://github.com/Seb3thehacker/linx-server)

---

linx-server
======
[](https://travis-ci.org/andreimarcu/linx-server)

Self-hosted file/media sharing website.

### Clients
**Official**
- CLI: **linx-client** - [Source](https://github.com/andreimarcu/linx-client)

**Unofficial**
- Android: **LinxShare** - [Source](https://github.com/iksteen/LinxShare/) | [Google Play](https://play.google.com/store/apps/details?id=org.thegraveyard.linxshare)
- CLI: **golinx** - [Source](https://github.com/mutantmonkey/golinx)

### Features

- Display common filetypes (image, video, audio, markdown, pdf)
- Display syntax-highlighted code with in-place editing
- Documented API with keys if you need to restrict uploads (can use [linx-client](https://github.com/andreimarcu/linx-client) for uploading through command-line)
- Documented API with keys for restricting uploads
- Torrent download of files using web seeding
- File expiry, deletion key, and random filename options
- File expiry, deletion key, file access key, and random filename options

### Screenshots
<img width="200" src="https://user-images.githubusercontent.com/4650950/51735725-0033cf00-203d-11e9-8a97-f543330a92ec.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735724-0033cf00-203d-11e9-8fe0-77442eaa8705.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735726-0033cf00-203d-11e9-9fca-095a97e46ce8.png" /> <img width="200" src="https://user-images.githubusercontent.com/4650950/51735728-0033cf00-203d-11e9-90e9-4f2d36332fc4.png" />
<img width="730" src="https://user-images.githubusercontent.com/4650950/76579039-03c82680-6488-11ea-8e23-4c927386fbd9.png" />

<img width="180" src="https://user-images.githubusercontent.com/4650950/76578903-771d6880-6487-11ea-8baf-a4a23fef4d26.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578910-7be21c80-6487-11ea-9a0a-587d59bc5f80.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578908-7b498600-6487-11ea-8994-ee7b6eb9cdb1.png" /> <img width="180" src="https://user-images.githubusercontent.com/4650950/76578907-7b498600-6487-11ea-8941-8f582bf87fb0.png" />

Get release and run
Getting started
-------------------
1. Grab the latest binary from the [releases](https://github.com/andreimarcu/linx-server/releases)
2. Run ```./linx-server```

#### Using Docker
1. Create directories ```files``` and ```meta``` and run ```chown -R 65534:65534 meta && chown -R 65534:65534 files```
2. Create a config file (example provided in repo), we'll refer to it as __linx-server.conf__ in the following examples

Example running
```
docker run -p 8080:8080 -v /path/to/linx-server.conf:/data/linx-server.conf -v /path/to/meta:/data/meta -v /path/to/files:/data/files andreimarcu/linx-server -config /data/linx-server.conf
```

Example with docker-compose
```
version: '2.2'
services:
  linx-server:
    container_name: linx-server
    image: andreimarcu/linx-server
    command: -config /data/linx-server.conf
    volumes:
      - /path/to/files:/data/files
      - /path/to/meta:/data/meta
      - /path/to/linx-server.conf:/data/linx-server.conf
    network_mode: bridge
    ports:
      - "8080:8080"
    restart: unless-stopped
```
Ideally, you would use a reverse proxy such as nginx or caddy to handle TLS certificates.

#### Using a binary release

1. Grab the latest binary from the [releases](https://github.com/andreimarcu/linx-server/releases), then run ```go install```
2. Run ```linx-server -config path/to/linx-server.conf```

Usage
-----

#### Configuration
All configuration options are accepted either as arguments or can be placed in an ini-style file as such:
All configuration options are accepted either as arguments or can be placed in a file as such (see example file linx-server.conf.example in repo):
```ini
bind = 127.0.0.1:8080
sitename = myLinx
maxsize = 4294967296
allowhotlink = true
# etc
```
...and then invoke ```linx-server -config path/to/config.ini```
maxexpiry = 86400
# ... etc
```
...and then run ```linx-server -config path/to/linx-server.conf```

#### Options
- ```-bind 127.0.0.1:8080``` -- what to bind to (default is 127.0.0.1:8080)
- ```-sitename myLinx``` -- the site name displayed on top (default is inferred from Host header)
- ```-siteurl "https://mylinx.example.org/"``` -- the site url (default is inferred from execution context)
- ```-selifpath "selif"``` -- path relative to site base url (the "selif" in mylinx.example.org/selif/image.jpg) where files are accessed directly (default: selif)
- ```-maxsize 4294967296``` -- maximum upload file size in bytes (default 4GB)
- ```-maxexpiry 86400``` -- maximum expiration time in seconds (default is 0, which is no expiry)
- ```-allowhotlink``` -- Allow file hotlinking
- ```-contentsecuritypolicy "..."``` -- Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-filecontentsecuritypolicy "..."``` -- Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
- ```-refererpolicy "..."``` -- Referrer-Policy header for pages (default is "same-origin")
- ```-filereferrerpolicy "..."``` -- Referrer-Policy header for files (default is "same-origin")
- ```-xframeoptions "..." ``` -- X-Frame-Options header (default is "SAMEORIGIN")
- ```-remoteuploads``` -- (optionally) enable remote uploads (/upload?url=https://...)
- ```-nologs``` -- (optionally) disable request logs in stdout
- ```-force-random-filename``` -- (optionally) force the use of random filenames

|Option|Description
|------|-----------
| ```bind = 127.0.0.1:8080``` | what to bind to (default is 127.0.0.1:8080)
| ```sitename = myLinx``` | the site name displayed on top (default is inferred from Host header)
| ```siteurl = https://mylinx.example.org/``` | the site url (default is inferred from execution context)
| ```selifpath = selif``` | path relative to site base url (the "selif" in mylinx.example.org/selif/image.jpg) where files are accessed directly (default: selif)
| ```maxsize = 4294967296``` | maximum upload file size in bytes (default 4GB)
| ```maxexpiry = 86400``` | maximum expiration time in seconds (default is 0, which is no expiry)
| ```allowhotlink = true``` | Allow file hotlinking
| ```contentsecuritypolicy = "..."``` | Content-Security-Policy header for pages (default is "default-src 'self'; img-src 'self' data:; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
| ```filecontentsecuritypolicy = "..."``` | Content-Security-Policy header for files (default is "default-src 'none'; img-src 'self'; object-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; frame-ancestors 'self';")
| ```refererpolicy = "..."``` | Referrer-Policy header for pages (default is "same-origin")
| ```filereferrerpolicy = "..."``` | Referrer-Policy header for files (default is "same-origin")
| ```xframeoptions = "..." ``` | X-Frame-Options header (default is "SAMEORIGIN")
| ```remoteuploads = true``` | (optionally) enable remote uploads (/upload?url=https://...)
| ```nologs = true``` | (optionally) disable request logs in stdout
| ```force-random-filename = true``` | (optionally) force the use of random filenames
| ```custompagespath = custom_pages/``` | (optionally) specify path to directory containing markdown pages (must end in .md) that will be added to the site navigation (this can be useful for providing contact/support information and so on). For example, custom_pages/My_Page.md will become My Page in the site navigation

#### Cleaning up expired files
When files expire, access is disabled immediately, but the files and metadata
will persist on disk until someone attempts to access them. You can set the following option to run cleanup every few minutes. This can also be done using a separate utility found in the linx-cleanup directory.

|Option|Description
|------|-----------
| ```cleanup-every-minutes = 5``` | How often to clean up expired files in minutes (default is 0, which means files will be cleaned up as they are accessed)

#### Require API Keys for uploads

|Option|Description
|------|-----------
| ```authfile = path/to/authfile``` | (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
| ```remoteauthfile = path/to/remoteauthfile``` | (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys
| ```basicauth = true``` | (optionally) allow basic authorization to upload or paste files from browser when `-authfile` is enabled. When uploading, you will be prompted to enter a user and password - leave the user blank and use your auth key as the password

A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.

#### Storage backends
The following storage backends are available:

|Name|Notes|Options
|----|-----|-------
|LocalFS|Enabled by default, this backend uses the filesystem|```-filespath files/``` -- Path to store uploads (default is files/)<br />```-metapath meta/``` -- Path to store information about uploads (default is meta/)|
|S3|Use with any S3-compatible provider.<br> This implementation will stream files through the linx instance (every download will request and stream the file from the S3 bucket).<br><br>For high-traffic environments, one might consider using an external caching layer such as described [in this article](https://blog.sentry.io/2017/03/01/dodging-s3-downtime-with-nginx-and-haproxy.html).|```-s3-endpoint https://...``` -- S3 endpoint<br>```-s3-region us-east-1``` -- S3 region<br>```-s3-bucket mybucket``` -- S3 bucket to use for files and metadata<br>```-s3-force-path-style``` (optional) -- force path-style addressing (e.g. https://<span></span>s3.amazonaws.com/linx/example.txt)<br><br>Environment variables to provide:<br>```AWS_ACCESS_KEY_ID``` -- the S3 access key<br>```AWS_SECRET_ACCESS_KEY ``` -- the S3 secret key<br>```AWS_SESSION_TOKEN``` (optional) -- the S3 session token|
|LocalFS|Enabled by default, this backend uses the filesystem|```filespath = files/``` -- Path to store uploads (default is files/)<br />```metapath = meta/``` -- Path to store information about uploads (default is meta/)|
|S3|Use with any S3-compatible provider.<br> This implementation will stream files through the linx instance (every download will request and stream the file from the S3 bucket). File metadata will be stored as tags on the object in the bucket.<br><br>For high-traffic environments, one might consider using an external caching layer such as described [in this article](https://blog.sentry.io/2017/03/01/dodging-s3-downtime-with-nginx-and-haproxy.html).|```s3-endpoint = https://...``` -- S3 endpoint<br>```s3-region = us-east-1``` -- S3 region<br>```s3-bucket = mybucket``` -- S3 bucket to use for files and metadata<br>```s3-force-path-style = true``` (optional) -- force path-style addressing (e.g. https://<span></span>s3.amazonaws.com/linx/example.txt)<br><br>Environment variables to provide:<br>```AWS_ACCESS_KEY_ID``` -- the S3 access key<br>```AWS_SECRET_ACCESS_KEY ``` -- the S3 secret key<br>```AWS_SESSION_TOKEN``` (optional) -- the S3 session token|

#### SSL with built-in server
- ```-certfile path/to/your.crt``` -- Path to the ssl certificate (required if you want to use the https server)
- ```-keyfile path/to/your.key``` -- Path to the ssl key (required if you want to use the https server)
|Option|Description
|------|-----------
| ```certfile = path/to/your.crt``` | Path to the ssl certificate (required if you want to use the https server)
| ```keyfile = path/to/your.key``` | Path to the ssl key (required if you want to use the https server)

#### Use with http proxy
- ```-realip``` -- let linx-server know you (nginx, etc) are providing the X-Real-IP and/or X-Forwarded-For headers.
|Option|Description
|------|-----------
| ```realip = true``` | let linx-server know you (nginx, etc) are providing the X-Real-IP and/or X-Forwarded-For headers.

#### Use with fastcgi
- ```-fastcgi``` -- serve through fastcgi

#### Require API Keys for uploads
- ```-authfile path/to/authfile``` -- (optionally) require authorization for upload/delete by providing a newline-separated file of scrypted auth keys
- ```-remoteauthfile path/to/remoteauthfile``` -- (optionally) require authorization for remote uploads by providing a newline-separated file of scrypted auth keys

A helper utility ```linx-genkey``` is provided which hashes keys to the format required in the auth files.

Cleaning up expired files
-------------------------
When files expire, access is disabled immediately, but the files and metadata
will persist on disk until someone attempts to access them. If you'd like to
automatically clean up files that have expired, you can use the included
`linx-cleanup` utility. To run it automatically, use a cronjob or similar type
of scheduled task.

You should be careful to ensure that only one instance of `linx-cleanup` runs at
a time to avoid unexpected behavior. It does not implement any type of locking.

#### Options
- ```-filespath files/``` -- Path to stored uploads (default is files/)
- ```-metapath meta/``` -- Path to stored information about uploads (default is meta/)
- ```-nologs``` -- (optionally) disable deletion logs in stdout

|Option|Description
|------|-----------
| ```fastcgi = true``` | serve through fastcgi

Deployment
----------

@@ -120,29 +182,16 @@ server {
	}
}
```
And run linx-server with the ```-fastcgi``` option.
And run linx-server with the ```fastcgi = true``` option.

#### 2. Using the built-in https server
Run linx-server with the ```-certfile path/to/cert.file``` and ```-keyfile path/to/key.file``` options.
Run linx-server with the ```certfile = path/to/cert.file``` and ```keyfile = path/to/key.file``` options.

#### 3. Using the built-in http server
Run linx-server normally.

#### 4. Using Docker with the built-in http server
First, build the image:
```docker build -t linx-server .```

You'll need some directories for the persistent storage. For the purposes of this example, we will use `/media/meta` and `/media/files`.

Then, run it:
```docker run -p 8080:8080 -v /media/meta:/data/meta -v /media/files:/data/files linx-server```

Development
-----------
Any help is welcome, PRs will be reviewed and merged accordingly.
The official IRC channel is #linx on irc.oftc.net

1. ```go get -u github.com/andreimarcu/linx-server ```
2. ```cd $GOPATH/src/github.com/andreimarcu/linx-server ```
3. ```go build && ./linx-server```

@@ -167,4 +216,4 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.

Author
-------
Andrei Marcu, http://andreim.net/
Andrei Marcu, https://andreim.net/
access.go (new file): 147 lines

@@ -0,0 +1,147 @@
package main

import (
	"encoding/json"
	"errors"
	"log"
	"net/http"
	"net/url"
	"path"
	"regexp"
	"strings"
	"time"

	"github.com/andreimarcu/linx-server/backends"
	"github.com/flosch/pongo2"
	"github.com/zenazn/goji/web"
)

type accessKeySource int

const (
	accessKeySourceNone accessKeySource = iota
	accessKeySourceCookie
	accessKeySourceHeader
	accessKeySourceForm
	accessKeySourceQuery
)

const accessKeyHeaderName = "Linx-Access-Key"
const accessKeyParamName = "access_key"

var (
	errInvalidAccessKey = errors.New("invalid access key")

	cliUserAgentRe = regexp.MustCompile("(?i)(lib)?curl|wget")
)

func checkAccessKey(r *http.Request, metadata *backends.Metadata) (accessKeySource, error) {
	key := metadata.AccessKey
	if key == "" {
		return accessKeySourceNone, nil
	}

	cookieKey, err := r.Cookie(accessKeyHeaderName)
	if err == nil {
		if cookieKey.Value == key {
			return accessKeySourceCookie, nil
		}
		return accessKeySourceCookie, errInvalidAccessKey
	}

	headerKey := r.Header.Get(accessKeyHeaderName)
	if headerKey == key {
		return accessKeySourceHeader, nil
	} else if headerKey != "" {
		return accessKeySourceHeader, errInvalidAccessKey
	}

	formKey := r.PostFormValue(accessKeyParamName)
	if formKey == key {
		return accessKeySourceForm, nil
	} else if formKey != "" {
		return accessKeySourceForm, errInvalidAccessKey
	}

	queryKey := r.URL.Query().Get(accessKeyParamName)
	if queryKey == key {
		return accessKeySourceQuery, nil
	} else if formKey != "" {
		return accessKeySourceQuery, errInvalidAccessKey
	}

	return accessKeySourceNone, errInvalidAccessKey
}

func setAccessKeyCookies(w http.ResponseWriter, siteURL, fileName, value string, expires time.Time) {
	u, err := url.Parse(siteURL)
	if err != nil {
		log.Printf("cant parse siteURL (%v): %v", siteURL, err)
		return
	}

	cookie := http.Cookie{
		Name:     accessKeyHeaderName,
		Value:    value,
		HttpOnly: true,
		Domain:   u.Hostname(),
		Expires:  expires,
	}

	cookie.Path = path.Join(u.Path, fileName)
	http.SetCookie(w, &cookie)

	cookie.Path = path.Join(u.Path, Config.selifPath, fileName)
	http.SetCookie(w, &cookie)
}

func fileAccessHandler(c web.C, w http.ResponseWriter, r *http.Request) {
	if !Config.noDirectAgents && cliUserAgentRe.MatchString(r.Header.Get("User-Agent")) && !strings.EqualFold("application/json", r.Header.Get("Accept")) {
		fileServeHandler(c, w, r)
		return
	}

	fileName := c.URLParams["name"]

	metadata, err := checkFile(fileName)
	if err == backends.NotFoundErr {
		notFoundHandler(c, w, r)
		return
	} else if err != nil {
		oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
		return
	}

	if src, err := checkAccessKey(r, &metadata); err != nil {
		// remove invalid cookie
		if src == accessKeySourceCookie {
			setAccessKeyCookies(w, getSiteURL(r), fileName, "", time.Unix(0, 0))
		}

		if strings.EqualFold("application/json", r.Header.Get("Accept")) {
			dec := json.NewEncoder(w)
			_ = dec.Encode(map[string]string{
				"error": errInvalidAccessKey.Error(),
			})

			return
		}

		_ = renderTemplate(Templates["access.html"], pongo2.Context{
			"filename":   fileName,
			"accesspath": fileName,
		}, r, w)

		return
	}

	if metadata.AccessKey != "" {
		var expiry time.Time
		if Config.accessKeyCookieExpiry != 0 {
			expiry = time.Now().Add(time.Duration(Config.accessKeyCookieExpiry) * time.Second)
		}
		setAccessKeyCookies(w, getSiteURL(r), fileName, metadata.AccessKey, expiry)
	}

	fileDisplayHandler(c, w, r, fileName, metadata)
}
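The access key introduced above can be supplied by a client through the `Linx-Access-Key` header, the `access_key` form or query parameter, or the cookie the server sets. A minimal client-side sketch in Go; only the header name comes from access.go, while the URL and key value are made-up examples:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Hypothetical protected upload and key, for illustration only.
	req, err := http.NewRequest("GET", "https://mylinx.example.org/selif/example.jpg", nil)
	if err != nil {
		panic(err)
	}
	// Send the access key the same way checkAccessKey reads it from the header.
	req.Header.Set("Linx-Access-Key", "my-secret-access-key")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		fmt.Fprintln(os.Stderr, "unexpected status:", resp.Status)
		return
	}
	_, _ = io.Copy(os.Stdout, resp.Body) // write the file contents to stdout
}
```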
auth.go (deleted): 116 lines

@@ -1,116 +0,0 @@
package main

import (
	"bufio"
	"encoding/base64"
	"log"
	"net/http"
	"os"

	"golang.org/x/crypto/scrypt"
)

const (
	scryptSalt   = "linx-server"
	scryptN      = 16384
	scryptr      = 8
	scryptp      = 1
	scryptKeyLen = 32
)

type AuthOptions struct {
	AuthFile      string
	UnauthMethods []string
}

type auth struct {
	successHandler http.Handler
	failureHandler http.Handler
	authKeys       []string
	o              AuthOptions
}

func readAuthKeys(authFile string) []string {
	var authKeys []string

	f, err := os.Open(authFile)
	if err != nil {
		log.Fatal("Failed to open authfile: ", err)
	}
	defer f.Close()

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		authKeys = append(authKeys, scanner.Text())
	}

	err = scanner.Err()
	if err != nil {
		log.Fatal("Scanner error while reading authfile: ", err)
	}

	return authKeys
}

func checkAuth(authKeys []string, key string) (result bool, err error) {
	checkKey, err := scrypt.Key([]byte(key), []byte(scryptSalt), scryptN, scryptr, scryptp, scryptKeyLen)
	if err != nil {
		return
	}

	encodedKey := base64.StdEncoding.EncodeToString(checkKey)
	for _, v := range authKeys {
		if encodedKey == v {
			result = true
			return
		}
	}

	result = false
	return
}

func (a auth) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	if sliceContains(a.o.UnauthMethods, r.Method) {
		// allow unauthenticated methods
		a.successHandler.ServeHTTP(w, r)
		return
	}

	key := r.Header.Get("Linx-Api-Key")

	result, err := checkAuth(a.authKeys, key)
	if err != nil || !result {
		a.failureHandler.ServeHTTP(w, r)
		return
	}

	a.successHandler.ServeHTTP(w, r)
}

func UploadAuth(o AuthOptions) func(http.Handler) http.Handler {
	fn := func(h http.Handler) http.Handler {
		return auth{
			successHandler: h,
			failureHandler: http.HandlerFunc(badAuthorizationHandler),
			authKeys:       readAuthKeys(o.AuthFile),
			o:              o,
		}
	}
	return fn
}

func badAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusUnauthorized)
	http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
}

func sliceContains(slice []string, s string) bool {
	for _, v := range slice {
		if s == v {
			return true
		}
	}

	return false
}
auth/apikeys/apikeys.go (new file): 154 lines

@@ -0,0 +1,154 @@
package apikeys

import (
	"bufio"
	"encoding/base64"
	"fmt"
	"log"
	"net/http"
	"os"

	"golang.org/x/crypto/scrypt"

	"github.com/zenazn/goji/web"
)

const (
	scryptSalt   = "linx-server"
	scryptN      = 16384
	scryptr      = 8
	scryptp      = 1
	scryptKeyLen = 32
)

type AuthOptions struct {
	AuthFile      string
	UnauthMethods []string
	BasicAuth     bool
	SiteName      string
	SitePath      string
}

type ApiKeysMiddleware struct {
	successHandler http.Handler
	authKeys       []string
	o              AuthOptions
}

func ReadAuthKeys(authFile string) []string {
	var authKeys []string

	f, err := os.Open(authFile)
	if err != nil {
		log.Fatal("Failed to open authfile: ", err)
	}
	defer f.Close()

	scanner := bufio.NewScanner(f)
	for scanner.Scan() {
		authKeys = append(authKeys, scanner.Text())
	}

	err = scanner.Err()
	if err != nil {
		log.Fatal("Scanner error while reading authfile: ", err)
	}

	return authKeys
}

func CheckAuth(authKeys []string, key string) (result bool, err error) {
	checkKey, err := scrypt.Key([]byte(key), []byte(scryptSalt), scryptN, scryptr, scryptp, scryptKeyLen)
	if err != nil {
		return
	}

	encodedKey := base64.StdEncoding.EncodeToString(checkKey)
	for _, v := range authKeys {
		if encodedKey == v {
			result = true
			return
		}
	}

	result = false
	return
}

func (a ApiKeysMiddleware) getSitePrefix() string {
	prefix := a.o.SitePath
	if len(prefix) <= 0 || prefix[0] != '/' {
		prefix = "/" + prefix
	}
	return prefix
}

func (a ApiKeysMiddleware) goodAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
	w.Header().Set("Location", a.getSitePrefix())
	w.WriteHeader(http.StatusFound)
}

func (a ApiKeysMiddleware) badAuthorizationHandler(w http.ResponseWriter, r *http.Request) {
	if a.o.BasicAuth {
		rs := ""
		if a.o.SiteName != "" {
			rs = fmt.Sprintf(` realm="%s"`, a.o.SiteName)
		}
		w.Header().Set("WWW-Authenticate", `Basic`+rs)
	}
	http.Error(w, http.StatusText(http.StatusUnauthorized), http.StatusUnauthorized)
}

func (a ApiKeysMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	var successHandler http.Handler
	prefix := a.getSitePrefix()

	if r.URL.Path == prefix+"auth" {
		successHandler = http.HandlerFunc(a.goodAuthorizationHandler)
	} else {
		successHandler = a.successHandler
	}

	if sliceContains(a.o.UnauthMethods, r.Method) && r.URL.Path != prefix+"auth" {
		// allow unauthenticated methods
		successHandler.ServeHTTP(w, r)
		return
	}

	key := r.Header.Get("Linx-Api-Key")
	if key == "" && a.o.BasicAuth {
		_, password, ok := r.BasicAuth()
		if ok {
			key = password
		}
	}

	result, err := CheckAuth(a.authKeys, key)
	if err != nil || !result {
		http.HandlerFunc(a.badAuthorizationHandler).ServeHTTP(w, r)
		return
	}

	successHandler.ServeHTTP(w, r)
}

func NewApiKeysMiddleware(o AuthOptions) func(*web.C, http.Handler) http.Handler {
	fn := func(c *web.C, h http.Handler) http.Handler {
		return ApiKeysMiddleware{
			successHandler: h,
			authKeys:       ReadAuthKeys(o.AuthFile),
			o:              o,
		}
	}
	return fn
}

func sliceContains(slice []string, s string) bool {
	for _, v := range slice {
		if s == v {
			return true
		}
	}

	return false
}
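The authfile entries that `CheckAuth` compares against are base64-encoded scrypt hashes. A minimal sketch of producing one entry, reusing the same scrypt parameters declared in apikeys.go above; this mirrors what the bundled `linx-genkey` helper does, and the example key below is made up:

```go
package main

import (
	"encoding/base64"
	"fmt"

	"golang.org/x/crypto/scrypt"
)

func main() {
	key := "haPVipRnGJ0QovA9nyqK" // hypothetical API key chosen by the operator

	// Same salt and cost parameters as scryptSalt/scryptN/scryptr/scryptp/scryptKeyLen.
	hashed, err := scrypt.Key([]byte(key), []byte("linx-server"), 16384, 8, 1, 32)
	if err != nil {
		panic(err)
	}

	// One line like this per key goes into the file passed via the authfile option.
	fmt.Println(base64.StdEncoding.EncodeToString(hashed))
}
```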
@@ -1,4 +1,4 @@
package main
package apikeys

import (
	"testing"

@@ -10,15 +10,15 @@ func TestCheckAuth(t *testing.T) {
		"vFpNprT9wbHgwAubpvRxYCCpA2FQMAK6hFqPvAGrdZo=",
	}

	if r, err := checkAuth(authKeys, ""); err != nil && r {
	if r, err := CheckAuth(authKeys, ""); err != nil && r {
		t.Fatal("Authorization passed for empty key")
	}

	if r, err := checkAuth(authKeys, "thisisnotvalid"); err != nil && r {
	if r, err := CheckAuth(authKeys, "thisisnotvalid"); err != nil && r {
		t.Fatal("Authorization passed for invalid key")
	}

	if r, err := checkAuth(authKeys, "haPVipRnGJ0QovA9nyqK"); err != nil && !r {
	if r, err := CheckAuth(authKeys, "haPVipRnGJ0QovA9nyqK"); err != nil && !r {
		t.Fatal("Authorization failed for valid key")
	}
}
@@ -4,6 +4,7 @@ import (
	"encoding/json"
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"path"
	"time"

@@ -19,6 +20,7 @@ type LocalfsBackend struct {

type MetadataJSON struct {
	DeleteKey    string   `json:"delete_key"`
	AccessKey    string   `json:"access_key,omitempty"`
	Sha256sum    string   `json:"sha256sum"`
	Mimetype     string   `json:"mimetype"`
	Size         int64    `json:"size"`

@@ -57,6 +59,7 @@ func (b LocalfsBackend) Head(key string) (metadata backends.Metadata, err error)
	}

	metadata.DeleteKey = mjson.DeleteKey
	metadata.AccessKey = mjson.AccessKey
	metadata.Mimetype = mjson.Mimetype
	metadata.ArchiveFiles = mjson.ArchiveFiles
	metadata.Sha256sum = mjson.Sha256sum

@@ -80,16 +83,29 @@ func (b LocalfsBackend) Get(key string) (metadata backends.Metadata, f io.ReadCl
	return
}

func (b LocalfsBackend) ServeFile(key string, w http.ResponseWriter, r *http.Request) (err error) {
	_, err = b.Head(key)
	if err != nil {
		return
	}

	filePath := path.Join(b.filesPath, key)
	http.ServeFile(w, r, filePath)

	return
}

func (b LocalfsBackend) writeMetadata(key string, metadata backends.Metadata) error {
	metaPath := path.Join(b.metaPath, key)

	mjson := MetadataJSON{
		DeleteKey: metadata.DeleteKey,
		Mimetype:  metadata.Mimetype,
		DeleteKey:    metadata.DeleteKey,
		AccessKey:    metadata.AccessKey,
		Mimetype:     metadata.Mimetype,
		ArchiveFiles: metadata.ArchiveFiles,
		Sha256sum: metadata.Sha256sum,
		Expiry:    metadata.Expiry.Unix(),
		Size:      metadata.Size,
		Sha256sum:    metadata.Sha256sum,
		Expiry:       metadata.Expiry.Unix(),
		Size:         metadata.Size,
	}

	dst, err := os.Create(metaPath)

@@ -108,7 +124,7 @@ func (b LocalfsBackend) writeMetadata(key string, metadata backends.Metadata) er
	return nil
}

func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (m backends.Metadata, err error) {
	filePath := path.Join(b.filesPath, key)

	dst, err := os.Create(filePath)

@@ -126,11 +142,17 @@ func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey
		return m, err
	}

	dst.Seek(0, 0)
	m, err = helpers.GenerateMetadata(dst)
	if err != nil {
		os.Remove(filePath)
		return
	}
	dst.Seek(0, 0)

	m.Expiry = expiry
	m.DeleteKey = deleteKey
	m.Size = bytes
	m.Mimetype, _ = helpers.DetectMime(dst)
	m.Sha256sum, _ = helpers.Sha256sum(dst)
	m.AccessKey = accessKey
	m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, dst)

	err = b.writeMetadata(key, m)

@@ -142,6 +164,15 @@ func (b LocalfsBackend) Put(key string, r io.Reader, expiry time.Time, deleteKey
	return
}

func (b LocalfsBackend) PutMetadata(key string, m backends.Metadata) (err error) {
	err = b.writeMetadata(key, m)
	if err != nil {
		return
	}

	return
}

func (b LocalfsBackend) Size(key string) (int64, error) {
	fileInfo, err := os.Stat(path.Join(b.filesPath, key))
	if err != nil {
@@ -7,6 +7,7 @@ import (

type Metadata struct {
	DeleteKey    string
	AccessKey    string
	Sha256sum    string
	Mimetype     string
	Size         int64
@@ -3,6 +3,7 @@ package s3
import (
	"io"
	"io/ioutil"
	"net/http"
	"os"
	"strconv"
	"time"

@@ -18,13 +19,13 @@ import (

type S3Backend struct {
	bucket string
	svc *s3.S3
	svc    *s3.S3
}

func (b S3Backend) Delete(key string) error {
	_, err := b.svc.DeleteObject(&s3.DeleteObjectInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Key:    aws.String(key),
	})
	if err != nil {
		return err

@@ -35,7 +36,7 @@ func (b S3Backend) Delete(key string) error {
func (b S3Backend) Exists(key string) (bool, error) {
	_, err := b.svc.HeadObject(&s3.HeadObjectInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Key:    aws.String(key),
	})
	return err == nil, err
}

@@ -44,7 +45,7 @@ func (b S3Backend) Head(key string) (metadata backends.Metadata, err error) {
	var result *s3.HeadObjectOutput
	result, err = b.svc.HeadObject(&s3.HeadObjectInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Key:    aws.String(key),
	})
	if err != nil {
		if aerr, ok := err.(awserr.Error); ok {

@@ -63,7 +64,7 @@ func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser,
	var result *s3.GetObjectOutput
	result, err = b.svc.GetObject(&s3.GetObjectInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Key:    aws.String(key),
	})
	if err != nil {
		if aerr, ok := err.(awserr.Error); ok {

@@ -79,13 +80,51 @@ func (b S3Backend) Get(key string) (metadata backends.Metadata, r io.ReadCloser,
	return
}

func (b S3Backend) ServeFile(key string, w http.ResponseWriter, r *http.Request) (err error) {
	var result *s3.GetObjectOutput

	if r.Header.Get("Range") != "" {
		result, err = b.svc.GetObject(&s3.GetObjectInput{
			Bucket: aws.String(b.bucket),
			Key:    aws.String(key),
			Range:  aws.String(r.Header.Get("Range")),
		})

		w.WriteHeader(206)
		w.Header().Set("Content-Range", *result.ContentRange)
		w.Header().Set("Content-Length", strconv.FormatInt(*result.ContentLength, 10))
		w.Header().Set("Accept-Ranges", "bytes")

	} else {
		result, err = b.svc.GetObject(&s3.GetObjectInput{
			Bucket: aws.String(b.bucket),
			Key:    aws.String(key),
		})

	}

	if err != nil {
		if aerr, ok := err.(awserr.Error); ok {
			if aerr.Code() == s3.ErrCodeNoSuchKey || aerr.Code() == "NotFound" {
				err = backends.NotFoundErr
			}
		}
		return
	}

	_, err = io.Copy(w, result.Body)

	return
}

func mapMetadata(m backends.Metadata) map[string]*string {
	return map[string]*string{
		"Expiry": aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
		"Delete_key": aws.String(m.DeleteKey),
		"Size": aws.String(strconv.FormatInt(m.Size, 10)),
		"Mimetype": aws.String(m.Mimetype),
		"Expiry":    aws.String(strconv.FormatInt(m.Expiry.Unix(), 10)),
		"Deletekey": aws.String(m.DeleteKey),
		"Size":      aws.String(strconv.FormatInt(m.Size, 10)),
		"Mimetype":  aws.String(m.Mimetype),
		"Sha256sum": aws.String(m.Sha256sum),
		"AccessKey": aws.String(m.AccessKey),
	}
}

@@ -101,13 +140,22 @@ func unmapMetadata(input map[string]*string) (m backends.Metadata, err error) {
		return
	}

	m.DeleteKey = aws.StringValue(input["Delete_key"])
	m.DeleteKey = aws.StringValue(input["Deletekey"])
	if m.DeleteKey == "" {
		m.DeleteKey = aws.StringValue(input["Delete_key"])
	}

	m.Mimetype = aws.StringValue(input["Mimetype"])
	m.Sha256sum = aws.StringValue(input["Sha256sum"])

	if key, ok := input["AccessKey"]; ok {
		m.AccessKey = aws.StringValue(key)
	}

	return
}

func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey string) (m backends.Metadata, err error) {
func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (m backends.Metadata, err error) {
	tmpDst, err := ioutil.TempFile("", "linx-server-upload")
	if err != nil {
		return m, err

@@ -122,19 +170,31 @@ func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey stri
		return m, err
	}

	_, err = tmpDst.Seek(0, 0)
	if err != nil {
		return m, err
	}

	m, err = helpers.GenerateMetadata(tmpDst)
	if err != nil {
		return
	}
	m.Expiry = expiry
	m.DeleteKey = deleteKey
	m.Size = bytes
	m.Mimetype, _ = helpers.DetectMime(tmpDst)
	m.Sha256sum, _ = helpers.Sha256sum(tmpDst)
	m.AccessKey = accessKey
	// XXX: we may not be able to write this to AWS easily
	//m.ArchiveFiles, _ = helpers.ListArchiveFiles(m.Mimetype, m.Size, tmpDst)

	_, err = tmpDst.Seek(0, 0)
	if err != nil {
		return m, err
	}

	uploader := s3manager.NewUploaderWithClient(b.svc)
	input := &s3manager.UploadInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Body: tmpDst,
		Bucket:   aws.String(b.bucket),
		Key:      aws.String(key),
		Body:     tmpDst,
		Metadata: mapMetadata(m),
	}
	_, err = uploader.Upload(input)

@@ -145,10 +205,25 @@ func (b S3Backend) Put(key string, r io.Reader, expiry time.Time, deleteKey stri
	return
}

func (b S3Backend) PutMetadata(key string, m backends.Metadata) (err error) {
	_, err = b.svc.CopyObject(&s3.CopyObjectInput{
		Bucket:            aws.String(b.bucket),
		Key:               aws.String(key),
		CopySource:        aws.String("/" + b.bucket + "/" + key),
		Metadata:          mapMetadata(m),
		MetadataDirective: aws.String("REPLACE"),
	})
	if err != nil {
		return
	}

	return
}

func (b S3Backend) Size(key string) (int64, error) {
	input := &s3.HeadObjectInput{
		Bucket: aws.String(b.bucket),
		Key: aws.String(key),
		Key:    aws.String(key),
	}
	result, err := b.svc.HeadObject(input)
	if err != nil {

@@ -169,7 +244,6 @@ func (b S3Backend) List() ([]string, error) {
		return nil, err
	}

	for _, object := range results.Contents {
		output = append(output, *object.Key)
	}
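Because `ServeFile` above forwards the `Range` header to the bucket and replies with `206 Partial Content`, clients can fetch byte ranges through the linx instance when the S3 backend is in use. A minimal client sketch; the URL and range are made-up examples:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
)

func main() {
	// Hypothetical upload served by a linx instance backed by S3.
	req, err := http.NewRequest("GET", "https://mylinx.example.org/selif/large.bin", nil)
	if err != nil {
		panic(err)
	}
	// Ask for the first kilobyte only; the backend forwards this Range to S3.
	req.Header.Set("Range", "bytes=0-1023")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	fmt.Fprintln(os.Stderr, "status:", resp.Status) // expect 206 Partial Content
	n, _ := io.Copy(os.Stdout, resp.Body)
	fmt.Fprintln(os.Stderr, "read", n, "bytes")
}
```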
@@ -3,6 +3,7 @@ package backends
import (
	"errors"
	"io"
	"net/http"
	"time"
)

@@ -11,7 +12,9 @@ type StorageBackend interface {
	Exists(key string) (bool, error)
	Head(key string) (Metadata, error)
	Get(key string) (Metadata, io.ReadCloser, error)
	Put(key string, r io.Reader, expiry time.Time, deleteKey string) (Metadata, error)
	Put(key string, r io.Reader, expiry time.Time, deleteKey, accessKey string) (Metadata, error)
	PutMetadata(key string, m Metadata) error
	ServeFile(key string, w http.ResponseWriter, r *http.Request) error
	Size(key string) (int64, error)
}
build.sh: 9 changes

@@ -13,6 +13,11 @@ function build_binary_rice {
        rice append --exec "$name"freebsd-$arch
    done

    for arch in arm amd64 386; do
        GOOS=netbsd GOARCH=$arch go build -o "$name"netbsd-$arch
        rice append --exec "$name"netbsd-$arch
    done

    for arch in amd64 386; do
        GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
        rice append --exec "$name"openbsd-$arch

@@ -40,6 +45,10 @@ function build_binary {
        GOOS=freebsd GOARCH=$arch go build -o "$name"freebsd-$arch
    done

    for arch in arm amd64 386; do
        GOOS=netbsd GOARCH=$arch go build -o "$name"netbsd-$arch
    done

    for arch in amd64 386; do
        GOOS=openbsd GOARCH=$arch go build -o "$name"openbsd-$arch
    done
@@ -1,26 +1,14 @@
package main
package cleanup

import (
	"flag"
	"log"
	"time"

	"github.com/andreimarcu/linx-server/backends/localfs"
	"github.com/andreimarcu/linx-server/expiry"
)

func main() {
	var filesDir string
	var metaDir string
	var noLogs bool

	flag.StringVar(&filesDir, "filespath", "files/",
		"path to files directory")
	flag.StringVar(&metaDir, "metapath", "meta/",
		"path to metadata directory")
	flag.BoolVar(&noLogs, "nologs", false,
		"don't log deleted files")
	flag.Parse()

func Cleanup(filesDir string, metaDir string, noLogs bool) {
	fileBackend := localfs.NewLocalfsBackend(metaDir, filesDir)

	files, err := fileBackend.List()

@@ -44,3 +32,11 @@ func main() {
		}
	}
}

func PeriodicCleanup(minutes time.Duration, filesDir string, metaDir string, noLogs bool) {
	c := time.Tick(minutes)
	for range c {
		Cleanup(filesDir, metaDir, noLogs)
	}

}
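With the cleanup logic moved into a `cleanup` package, the server (or any other program) can schedule it in-process instead of through cron. A minimal sketch of such a caller; the import path, directories, and interval are assumptions for illustration only:

```go
package main

import (
	"time"

	// Assumed import path for the refactored package shown above.
	"github.com/andreimarcu/linx-server/cleanup"
)

func main() {
	filesDir := "files/" // hypothetical upload directory
	metaDir := "meta/"   // hypothetical metadata directory
	noLogs := false

	// Run one cleanup pass immediately, then repeat every 5 minutes.
	cleanup.Cleanup(filesDir, metaDir, noLogs)
	go cleanup.PeriodicCleanup(5*time.Minute, filesDir, metaDir, noLogs)

	select {} // block forever; a real server would be handling requests here
}
```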
custom_pages.go (new file): 40 lines

@@ -0,0 +1,40 @@
package main

import (
	"io/ioutil"
	"log"
	"path"
	"strings"

	"github.com/microcosm-cc/bluemonday"
	"github.com/russross/blackfriday"
)

func initializeCustomPages(customPagesDir string) {
	files, err := ioutil.ReadDir(customPagesDir)
	if err != nil {
		log.Fatal("Error reading the custom pages directory: ", err)
	}

	for _, file := range files {
		fileName := file.Name()

		if len(fileName) <= 3 {
			continue
		}

		if strings.EqualFold(string(fileName[len(fileName)-3:len(fileName)]), ".md") {
			contents, err := ioutil.ReadFile(path.Join(customPagesDir, fileName))
			if err != nil {
				log.Fatalf("Error reading file %s", fileName)
			}

			unsafe := blackfriday.MarkdownCommon(contents)
			html := bluemonday.UGCPolicy().SanitizeBytes(unsafe)

			fileName := fileName[0 : len(fileName)-3]
			customPages[fileName] = string(html)
			customPagesNames[fileName] = strings.ReplaceAll(fileName, "_", " ")
		}
	}
}
display.go: 25 changes

@@ -5,7 +5,6 @@ import (
	"io/ioutil"
	"net/http"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"time"

@@ -21,24 +20,7 @@ import (

const maxDisplayFileSizeBytes = 1024 * 512

var cliUserAgentRe = regexp.MustCompile("(?i)(lib)?curl|wget")

func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
	if !Config.noDirectAgents && cliUserAgentRe.MatchString(r.Header.Get("User-Agent")) && !strings.EqualFold("application/json", r.Header.Get("Accept")) {
		fileServeHandler(c, w, r)
		return
	}

	fileName := c.URLParams["name"]

	metadata, err := checkFile(fileName)
	if err == backends.NotFoundErr {
		notFoundHandler(c, w, r)
		return
	} else if err != nil {
		oopsHandler(c, w, r, RespAUTO, "Corrupt metadata.")
		return
	}
func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request, fileName string, metadata backends.Metadata) {
	var expiryHuman string
	if metadata.Expiry != expiry.NeverExpire {
		expiryHuman = humanize.RelTime(time.Now(), metadata.Expiry, "", "")

@@ -118,7 +100,7 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
		bytes, err := ioutil.ReadAll(reader)
		if err == nil {
			extra["extension"] = extension
			extra["lang_hl"], extra["lang_ace"] = extensionToHlAndAceLangs(extension)
			extra["lang_hl"] = extensionToHlLang(extension)
			extra["contents"] = string(bytes)
			tpl = Templates["display/bin.html"]
		}

@@ -130,7 +112,7 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
		tpl = Templates["display/file.html"]
	}

	err = renderTemplate(tpl, pongo2.Context{
	err := renderTemplate(tpl, pongo2.Context{
		"mime":     metadata.Mimetype,
		"filename": fileName,
		"size":     sizeHuman,

@@ -140,6 +122,7 @@ func fileDisplayHandler(c web.C, w http.ResponseWriter, r *http.Request) {
		"forcerandom": Config.forceRandomFilename,
		"lines":       lines,
		"files":       metadata.ArchiveFiles,
		"siteurl":     strings.TrimSuffix(getSiteURL(r), "/"),
	}, r, w)

	if err != nil {
fileserve.go: 21 changes

@@ -2,7 +2,6 @@ package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
	"strconv"

@@ -27,6 +26,16 @@ func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
		return
	}

	if src, err := checkAccessKey(r, &metadata); err != nil {
		// remove invalid cookie
		if src == accessKeySourceCookie {
			setAccessKeyCookies(w, getSiteURL(r), fileName, "", time.Unix(0, 0))
		}
		unauthorizedHandler(c, w, r)

		return
	}

	if !Config.allowHotlink {
		referer := r.Header.Get("Referer")
		u, _ := url.Parse(referer)

@@ -51,15 +60,11 @@ func fileServeHandler(c web.C, w http.ResponseWriter, r *http.Request) {
	}

	if r.Method != "HEAD" {
		_, reader, err := storageBackend.Get(fileName)
		if err != nil {
			oopsHandler(c, w, r, RespAUTO, "Unable to open file.")
			return
		}
		defer reader.Close()

		if _, err = io.CopyN(w, reader, metadata.Size); err != nil {
		storageBackend.ServeFile(fileName, w, r)
		if err != nil {
			oopsHandler(c, w, r, RespAUTO, err.Error())
			return
		}
	}
}
go.mod (new file): 19 lines

@@ -0,0 +1,19 @@
module github.com/andreimarcu/linx-server

go 1.14

require (
	github.com/GeertJohan/go.rice v1.0.0
	github.com/aws/aws-sdk-go v1.29.19
	github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5
	github.com/dustin/go-humanize v1.0.0
	github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4
	github.com/gabriel-vasile/mimetype v1.1.1
	github.com/microcosm-cc/bluemonday v1.0.2
	github.com/minio/sha256-simd v0.1.1
	github.com/russross/blackfriday v1.5.1
	github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de
	github.com/zeebo/bencode v1.0.0
	github.com/zenazn/goji v0.9.0
	golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073
)
go.sum
Normal file
76
go.sum
Normal file
|
@ -0,0 +1,76 @@
|
|||
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
|
||||
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
|
||||
github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
|
||||
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
|
||||
github.com/aws/aws-sdk-go v1.29.19 h1:+jifYixffn6kzWygtGWFWQMv0tDGyISZHNwugF9V2sE=
|
||||
github.com/aws/aws-sdk-go v1.29.19/go.mod h1:1KvfttTE3SPKMpo8g2c6jL3ZKfXtFvKscTgahTma5Xg=
|
||||
github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
|
||||
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5 h1:RAV05c0xOkJ3dZGS0JFybxFKZ2WMLabgx3uXnd7rpGs=
|
||||
github.com/dchest/uniuri v0.0.0-20200228104902-7aecb25e1fe5/go.mod h1:GgB8SF9nRG+GqaDtLcwJZsQFhcogVCJ79j4EdT0c2V4=
|
||||
github.com/dustin/go-humanize v1.0.0 h1:VSnTsYCnlFHaM2/igO1h6X3HA71jcobQuxemgkq4zYo=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4 h1:GY1+t5Dr9OKADM64SYnQjw/w99HMYvQ0A8/JoUkxVmc=
|
||||
github.com/flosch/pongo2 v0.0.0-20190707114632-bbf5a6c351f4/go.mod h1:T9YF2M40nIgbVgp3rreNmTged+9HrbNTIQf1PsaIiTA=
|
||||
github.com/gabriel-vasile/mimetype v1.1.1 h1:qbN9MPuRf3bstHu9zkI9jDWNfH//9+9kHxr9oRBBBOA=
|
||||
github.com/gabriel-vasile/mimetype v1.1.1/go.mod h1:6CDPel/o/3/s4+bp6kIbsWATq8pmgOisOPG40CJa6To=
|
||||
github.com/go-check/check v0.0.0-20180628173108-788fd7840127 h1:0gkP6mzaMqkmpcJYCFOLkIBwI7xFExG03bbkOkCvUPI=
|
||||
github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af h1:pmfjZENx5imkbgOkpRUYLnmbU7UEFbjtDA2hxJ1ichM=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
|
||||
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5 h1:rhqTjzJlm7EbkELJDKMTU7udov+Se0xZkWmugr6zGok=
|
||||
github.com/juju/errors v0.0.0-20181118221551-089d3ea4e4d5/go.mod h1:W54LbzXuIE0boCoNJfwqpmkKJ1O4TCTZMetAt6jGk7Q=
|
||||
github.com/juju/loggo v0.0.0-20180524022052-584905176618 h1:MK144iBQF9hTSwBW/9eJm034bVoG30IshVm688T2hi8=
|
||||
github.com/juju/loggo v0.0.0-20180524022052-584905176618/go.mod h1:vgyd7OREkbtVEN/8IXZe5Ooef3LQePvuBm9UWj6ZL8U=
|
||||
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073 h1:WQM1NildKThwdP7qWrNAFGzp4ijNLw8RlgENkaI4MJs=
|
||||
github.com/juju/testing v0.0.0-20180920084828-472a3e8b2073/go.mod h1:63prj8cnj0tU0S9OHjGJn+b1h0ZghCndfnbQolrYTwA=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw=
|
||||
github.com/microcosm-cc/bluemonday v1.0.2 h1:5lPfLTTAvAbtS0VqT+94yOtFnGfUWYyx0+iToC3Os3s=
|
||||
github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc=
|
||||
github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
|
||||
github.com/minio/sha256-simd v0.1.1/go.mod h1:B5e1o+1/KgNmWrSQK08Y6Z1Vb5pwIktudl0J58iy0KM=
|
||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
||||
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/russross/blackfriday v1.5.1 h1:B8ZN6pD4PVofmlDCDUdELeYrbsVIDM/bpjW3v3zgcRc=
|
||||
github.com/russross/blackfriday v1.5.1/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
||||
github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de h1:fkw+7JkxF3U1GzQoX9h69Wvtvxajo5Rbzy6+YMMzPIg=
|
||||
github.com/vharitonsky/iniflags v0.0.0-20180513140207-a33cd0b5f3de/go.mod h1:irMhzlTz8+fVFj6CH2AN2i+WI5S6wWFtK3MBCIxIpyI=
|
||||
github.com/zeebo/bencode v1.0.0 h1:zgop0Wu1nu4IexAZeCZ5qbsjU4O1vMrfCrVgUjbHVuA=
|
||||
github.com/zeebo/bencode v1.0.0/go.mod h1:Ct7CkrWIQuLWAy9M3atFHYq4kG9Ao/SsY5cdtCXmp9Y=
|
||||
github.com/zenazn/goji v0.9.0 h1:RSQQAbXGArQ0dIDEq+PI6WqN6if+5KHu6x2Cx/GXLTQ=
|
||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073 h1:xMPOj6Pz6UipU1wXLkrtqpHbR0AVFnyPEQq/wRWz9lM=
|
||||
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI=
|
||||
golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce h1:xcEWjVhvbDy+nHP67nPDDpbYrY+ILlfndk4bRioVHaU=
|
||||
gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|
@@ -1,49 +1,56 @@
package helpers

import (
    "bytes"
    "encoding/hex"
    "io"
    "unicode"

    "github.com/andreimarcu/linx-server/backends"
    "github.com/gabriel-vasile/mimetype"
    "github.com/minio/sha256-simd"
    "gopkg.in/h2non/filetype.v1"
)

func DetectMime(r io.ReadSeeker) (string, error) {
func GenerateMetadata(r io.Reader) (m backends.Metadata, err error) {
    // Since we don't have the ability to seek within a file, we can use a
    // Buffer in combination with a TeeReader to keep a copy of the bytes
    // we read when detecting the file type. These bytes are still needed
    // to hash the file and determine its size and cannot be discarded.
    var buf bytes.Buffer
    teeReader := io.TeeReader(r, &buf)

    // Get first 512 bytes for mimetype detection
    header := make([]byte, 512)

    r.Seek(0, 0)
    r.Read(header)
    r.Seek(0, 0)

    kind, err := filetype.Match(header)
    headerlen, err := teeReader.Read(header)
    if err != nil {
        return "application/octet-stream", err
    } else if kind.MIME.Value != "" {
        return kind.MIME.Value, nil
        return
    }

    // Check if the file seems anything like text
    if printable(header) {
        return "text/plain", nil
    } else {
        return "application/octet-stream", nil
    }
}

func Sha256sum(r io.ReadSeeker) (string, error) {
    // Create a Hash and a MultiReader that includes the Buffer we created
    // above along with the original Reader, which will have the rest of
    // the file.
    hasher := sha256.New()
    multiReader := io.MultiReader(&buf, r)

    r.Seek(0, 0)
    _, err := io.Copy(hasher, r)
    // Copy everything into the Hash, then use the number of bytes written
    // as the file size.
    var readLen int64
    readLen, err = io.Copy(hasher, multiReader)
    if err != nil {
        return "", err
        return
    } else {
        m.Size += readLen
    }

    r.Seek(0, 0)
    // Get the hex-encoded string version of the Hash checksum
    m.Sha256sum = hex.EncodeToString(hasher.Sum(nil))

    return hex.EncodeToString(hasher.Sum(nil)), nil
    // Use the bytes we extracted earlier and attempt to determine the file
    // type
    kind := mimetype.Detect(header[:headerlen])
    m.Mimetype = kind.String()

    return
}

func printable(data []byte) bool {
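The rewrite above hinges on a peek-then-replay trick: the incoming io.Reader cannot be rewound, so the first 512 bytes are read through a TeeReader that keeps a copy in a buffer for MIME sniffing, and the hash is then computed over a MultiReader that replays the buffered bytes before the rest of the stream, with the copied byte count doubling as the file size. A minimal standalone sketch of the same pattern (illustrative only, not project code; it uses the standard crypto/sha256 rather than sha256-simd):

package main

import (
    "bytes"
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "io"
    "strings"
)

func main() {
    var src io.Reader = strings.NewReader("some non-seekable stream")

    // Peek at the first bytes while keeping a copy of everything read so far.
    var peeked bytes.Buffer
    tee := io.TeeReader(src, &peeked)
    header := make([]byte, 512)
    n, _ := tee.Read(header)
    fmt.Printf("sniffed %d header bytes\n", n)

    // Replay the peeked bytes, then continue with the untouched remainder.
    hasher := sha256.New()
    size, _ := io.Copy(hasher, io.MultiReader(&peeked, src))
    fmt.Println(size, hex.EncodeToString(hasher.Sum(nil)))
}

This is also why m.Size above comes from the io.Copy return value rather than from a separate Stat call.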
helpers/helpers_test.go (new file, 73 lines)
@@ -0,0 +1,73 @@
package helpers

import (
    "bytes"
    "strings"
    "testing"
    "unicode/utf16"
)

func TestGenerateMetadata(t *testing.T) {
    r := strings.NewReader("This is my test content")
    m, err := GenerateMetadata(r)
    if err != nil {
        t.Fatal(err)
    }

    expectedSha256sum := "966152d20a77e739716a625373ee15af16e8f4aec631a329a27da41c204b0171"
    if m.Sha256sum != expectedSha256sum {
        t.Fatalf("Sha256sum was %q instead of expected value of %q", m.Sha256sum, expectedSha256sum)
    }

    expectedMimetype := "text/plain; charset=utf-8"
    if m.Mimetype != expectedMimetype {
        t.Fatalf("Mimetype was %q instead of expected value of %q", m.Mimetype, expectedMimetype)
    }

    expectedSize := int64(23)
    if m.Size != expectedSize {
        t.Fatalf("Size was %d instead of expected value of %d", m.Size, expectedSize)
    }
}

func TestTextCharsets(t *testing.T) {
    // verify that different text encodings are detected and passed through
    orig := "This is a text string"
    utf16 := utf16.Encode([]rune(orig))
    utf16LE := make([]byte, len(utf16)*2+2)
    utf16BE := make([]byte, len(utf16)*2+2)
    utf8 := []byte(orig)
    utf16LE[0] = 0xff
    utf16LE[1] = 0xfe
    utf16BE[0] = 0xfe
    utf16BE[1] = 0xff
    for i := 0; i < len(utf16); i++ {
        lsb := utf16[i] & 0xff
        msb := utf16[i] >> 8
        utf16LE[i*2+2] = byte(lsb)
        utf16LE[i*2+3] = byte(msb)
        utf16BE[i*2+2] = byte(msb)
        utf16BE[i*2+3] = byte(lsb)
    }

    testcases := []struct {
        data      []byte
        extension string
        mimetype  string
    }{
        {mimetype: "text/plain; charset=utf-8", data: utf8},
        {mimetype: "text/plain; charset=utf-16le", data: utf16LE},
        {mimetype: "text/plain; charset=utf-16be", data: utf16BE},
    }

    for i, testcase := range testcases {
        r := bytes.NewReader(testcase.data)
        m, err := GenerateMetadata(r)
        if err != nil {
            t.Fatalf("[%d] unexpected error return %v\n", i, err)
        }
        if m.Mimetype != testcase.mimetype {
            t.Errorf("[%d] Expected mimetype '%s', got mimetype '%s'\n", i, testcase.mimetype, m.Mimetype)
        }
    }
}
linx-cleanup/README.md (new file, 19 lines)
@@ -0,0 +1,19 @@

linx-cleanup
-------------------------

When files expire, access is disabled immediately, but the files and metadata
will persist on disk until someone attempts to access them.

If you'd like to automatically clean up files that have expired, you can use the
included `linx-cleanup` utility. To run it automatically, use a cronjob or
similar type of scheduled task.

You should be careful to ensure that only one instance of `linx-cleanup` runs at
a time to avoid unexpected behavior. It does not implement any type of locking.

| Option | Description |
|--------|-------------|
| `-filespath files/` | Path to stored uploads (default is `files/`) |
| `-nologs` | (optionally) disable deletion logs in stdout |
| `-metapath meta/` | Path to stored information about uploads (default is `meta/`) |
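Any scheduler fits the "cronjob or similar" suggestion above: a cron entry, the systemd service and timer units included below, or the server's own cleanup-every-minutes option (see the cleanup.PeriodicCleanup call in server.go further down). Purely as an illustration, assuming nothing beyond the cleanup.Cleanup(filesDir, metaDir, noLogs) call shown in linx-cleanup.go below, a long-running wrapper could also do the sweep itself:

package main

import (
    "time"

    "github.com/andreimarcu/linx-server/cleanup"
)

func main() {
    // Hypothetical standalone scheduler; in practice a cron job, the
    // systemd timer below, or the server's cleanup-every-minutes
    // option already covers this.
    ticker := time.NewTicker(1 * time.Hour)
    defer ticker.Stop()

    for range ticker.C {
        cleanup.Cleanup("files/", "meta/", false)
    }
}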
linx-cleanup/linx-cleanup.go (new file, 23 lines)
@@ -0,0 +1,23 @@
package main

import (
    "flag"

    "github.com/andreimarcu/linx-server/cleanup"
)

func main() {
    var filesDir string
    var metaDir string
    var noLogs bool

    flag.StringVar(&filesDir, "filespath", "files/",
        "path to files directory")
    flag.StringVar(&metaDir, "metapath", "meta/",
        "path to metadata directory")
    flag.BoolVar(&noLogs, "nologs", false,
        "don't log deleted files")
    flag.Parse()

    cleanup.Cleanup(filesDir, metaDir, noLogs)
}
linx-cleanup/linx-cleanup.service (new file, 12 lines)
@@ -0,0 +1,12 @@
[Unit]
Description=Self-hosted file/code/media sharing (expired files cleanup)
After=network.target

[Service]
User=linx
Group=linx
ExecStart=/usr/bin/linx-cleanup
WorkingDirectory=/srv/linx/

[Install]
WantedBy=multi-user.target
linx-cleanup/linx-cleanup.timer (new file, 8 lines)
@@ -0,0 +1,8 @@
[Unit]
Description=Run linx-cleanup every hour

[Timer]
OnUnitActiveSec=1h

[Install]
WantedBy=timers.target
linx-server.conf.example (new file, 12 lines)
@@ -0,0 +1,12 @@
bind = 127.0.0.1:8080
sitename = myLinx
siteurl = https://mylinx.example.org/
selifpath = s
maxsize = 4294967296
maxexpiry = 86400
allowhotlink = true
remoteuploads = true
nologs = true
force-random-filename = false
cleanup-every-minutes = 5
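Each key in this example corresponds to the command-line flag of the same name: server.go registers the flags and then calls iniflags.Parse() (shown further down), so a value can come from the command line or from a file like this one. A minimal sketch of that pattern with placeholder flags, assuming iniflags' usual -config flag for pointing at the file:

package main

import (
    "flag"
    "fmt"

    "github.com/vharitonsky/iniflags"
)

var (
    bind     = flag.String("bind", "127.0.0.1:8080", "host:port or socket path to bind to")
    sitename = flag.String("sitename", "", "name of the site")
)

func main() {
    // iniflags.Parse replaces flag.Parse and additionally reads
    // "key = value" lines from the file given via -config, mapping
    // them onto the identically named flags.
    iniflags.Parse()
    fmt.Println(*bind, *sitename)
}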
pages.go (15 changes)
@@ -50,6 +50,21 @@ func apiDocHandler(c web.C, w http.ResponseWriter, r *http.Request) {
    }
}

func makeCustomPageHandler(fileName string) func(c web.C, w http.ResponseWriter, r *http.Request) {
    return func(c web.C, w http.ResponseWriter, r *http.Request) {
        err := renderTemplate(Templates["custom_page.html"], pongo2.Context{
            "siteurl":     getSiteURL(r),
            "forcerandom": Config.forceRandomFilename,
            "contents":    customPages[fileName],
            "filename":    fileName,
            "pagename":    customPagesNames[fileName],
        }, r, w)
        if err != nil {
            oopsHandler(c, w, r, RespHTML, "")
        }
    }
}

func notFoundHandler(c web.C, w http.ResponseWriter, r *http.Request) {
    w.WriteHeader(404)
    err := renderTemplate(Templates["404.html"], pongo2.Context{}, r, w)
65
server.go
65
server.go
|
@ -8,15 +8,19 @@ import (
|
|||
"net/http/fcgi"
|
||||
"net/url"
|
||||
"os"
|
||||
"os/signal"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"github.com/GeertJohan/go.rice"
|
||||
rice "github.com/GeertJohan/go.rice"
|
||||
"github.com/andreimarcu/linx-server/auth/apikeys"
|
||||
"github.com/andreimarcu/linx-server/backends"
|
||||
"github.com/andreimarcu/linx-server/backends/localfs"
|
||||
"github.com/andreimarcu/linx-server/backends/s3"
|
||||
"github.com/andreimarcu/linx-server/cleanup"
|
||||
"github.com/flosch/pongo2"
|
||||
"github.com/vharitonsky/iniflags"
|
||||
"github.com/zenazn/goji/graceful"
|
||||
|
@ -57,6 +61,7 @@ var Config struct {
|
|||
allowHotlink bool
|
||||
fastcgi bool
|
||||
remoteUploads bool
|
||||
basicAuth bool
|
||||
authFile string
|
||||
remoteAuthFile string
|
||||
addHeaders headerList
|
||||
|
@ -66,6 +71,9 @@ var Config struct {
|
|||
s3Bucket string
|
||||
s3ForcePathStyle bool
|
||||
forceRandomFilename bool
|
||||
accessKeyCookieExpiry uint64
|
||||
customPagesDir string
|
||||
cleanupEveryMinutes uint64
|
||||
}
|
||||
|
||||
var Templates = make(map[string]*pongo2.Template)
|
||||
|
@ -76,6 +84,8 @@ var timeStartedStr string
|
|||
var remoteAuthKeys []string
|
||||
var metaStorageBackend backends.MetaStorageBackend
|
||||
var storageBackend backends.StorageBackend
|
||||
var customPages = make(map[string]string)
|
||||
var customPagesNames = make(map[string]string)
|
||||
|
||||
func setup() *web.Mux {
|
||||
mux := web.New()
|
||||
|
@ -101,9 +111,12 @@ func setup() *web.Mux {
|
|||
mux.Use(AddHeaders(Config.addHeaders))
|
||||
|
||||
if Config.authFile != "" {
|
||||
mux.Use(UploadAuth(AuthOptions{
|
||||
mux.Use(apikeys.NewApiKeysMiddleware(apikeys.AuthOptions{
|
||||
AuthFile: Config.authFile,
|
||||
UnauthMethods: []string{"GET", "HEAD", "OPTIONS", "TRACE"},
|
||||
BasicAuth: Config.basicAuth,
|
||||
SiteName: Config.siteName,
|
||||
SitePath: Config.sitePath,
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -143,6 +156,10 @@ func setup() *web.Mux {
|
|||
storageBackend = s3.NewS3Backend(Config.s3Bucket, Config.s3Region, Config.s3Endpoint, Config.s3ForcePathStyle)
|
||||
} else {
|
||||
storageBackend = localfs.NewLocalfsBackend(Config.metaDir, Config.filesDir)
|
||||
if Config.cleanupEveryMinutes > 0 {
|
||||
go cleanup.PeriodicCleanup(time.Duration(Config.cleanupEveryMinutes)*time.Minute, Config.filesDir, Config.metaDir, Config.noLogs)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
// Template setup
|
||||
|
@ -166,7 +183,7 @@ func setup() *web.Mux {
|
|||
selifIndexRe := regexp.MustCompile("^" + Config.sitePath + Config.selifPath + `$`)
|
||||
torrentRe := regexp.MustCompile("^" + Config.sitePath + `(?P<name>[a-z0-9-\.]+)/torrent$`)
|
||||
|
||||
if Config.authFile == "" {
|
||||
if Config.authFile == "" || Config.basicAuth {
|
||||
mux.Get(Config.sitePath, indexHandler)
|
||||
mux.Get(Config.sitePath+"paste/", pasteHandler)
|
||||
} else {
|
||||
|
@ -183,7 +200,7 @@ func setup() *web.Mux {
|
|||
mux.Get(Config.sitePath+"upload/", uploadRemote)
|
||||
|
||||
if Config.remoteAuthFile != "" {
|
||||
remoteAuthKeys = readAuthKeys(Config.remoteAuthFile)
|
||||
remoteAuthKeys = apikeys.ReadAuthKeys(Config.remoteAuthFile)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -198,11 +215,20 @@ func setup() *web.Mux {
|
|||
mux.Get(Config.sitePath+"static/*", staticHandler)
|
||||
mux.Get(Config.sitePath+"favicon.ico", staticHandler)
|
||||
mux.Get(Config.sitePath+"robots.txt", staticHandler)
|
||||
mux.Get(nameRe, fileDisplayHandler)
|
||||
mux.Get(nameRe, fileAccessHandler)
|
||||
mux.Post(nameRe, fileAccessHandler)
|
||||
mux.Get(selifRe, fileServeHandler)
|
||||
mux.Get(selifIndexRe, unauthorizedHandler)
|
||||
mux.Get(torrentRe, fileTorrentHandler)
|
||||
|
||||
if Config.customPagesDir != "" {
|
||||
initializeCustomPages(Config.customPagesDir)
|
||||
for fileName := range customPagesNames {
|
||||
mux.Get(Config.sitePath+fileName, makeCustomPageHandler(fileName))
|
||||
mux.Get(Config.sitePath+fileName+"/", makeCustomPageHandler(fileName))
|
||||
}
|
||||
}
|
||||
|
||||
mux.NotFound(notFoundHandler)
|
||||
|
||||
return mux
|
||||
|
@ -215,6 +241,8 @@ func main() {
|
|||
"path to files directory")
|
||||
flag.StringVar(&Config.metaDir, "metapath", "meta/",
|
||||
"path to metadata directory")
|
||||
flag.BoolVar(&Config.basicAuth, "basicauth", false,
|
||||
"allow logging by basic auth password")
|
||||
flag.BoolVar(&Config.noLogs, "nologs", false,
|
||||
"remove stdout output for each request")
|
||||
flag.BoolVar(&Config.allowHotlink, "allowhotlink", false,
|
||||
|
@ -271,13 +299,38 @@ func main() {
|
|||
"Force path-style addressing for S3 (e.g. https://s3.amazonaws.com/linx/example.txt)")
|
||||
flag.BoolVar(&Config.forceRandomFilename, "force-random-filename", false,
|
||||
"Force all uploads to use a random filename")
|
||||
flag.Uint64Var(&Config.accessKeyCookieExpiry, "access-cookie-expiry", 0, "Expiration time for access key cookies in seconds (set 0 to use session cookies)")
|
||||
flag.StringVar(&Config.customPagesDir, "custompagespath", "",
|
||||
"path to directory containing .md files to render as custom pages")
|
||||
flag.Uint64Var(&Config.cleanupEveryMinutes, "cleanup-every-minutes", 0,
|
||||
"How often to clean up expired files in minutes (default is 0, which means files will be cleaned up as they are accessed)")
|
||||
|
||||
iniflags.Parse()
|
||||
|
||||
mux := setup()
|
||||
|
||||
if Config.fastcgi {
|
||||
listener, err := net.Listen("tcp", Config.bind)
|
||||
var listener net.Listener
|
||||
var err error
|
||||
if Config.bind[0] == '/' {
|
||||
// UNIX path
|
||||
listener, err = net.ListenUnix("unix", &net.UnixAddr{Name: Config.bind, Net: "unix"})
|
||||
cleanup := func() {
|
||||
log.Print("Removing FastCGI socket")
|
||||
os.Remove(Config.bind)
|
||||
}
|
||||
defer cleanup()
|
||||
sigs := make(chan os.Signal, 1)
|
||||
signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM)
|
||||
go func() {
|
||||
sig := <-sigs
|
||||
log.Print("Signal: ", sig)
|
||||
cleanup()
|
||||
os.Exit(0)
|
||||
}()
|
||||
} else {
|
||||
listener, err = net.Listen("tcp", Config.bind)
|
||||
}
|
||||
if err != nil {
|
||||
log.Fatal("Could not bind: ", err)
|
||||
}
|
||||
|
|
|
@ -44,6 +44,10 @@
|
|||
overflow: auto;
|
||||
}
|
||||
|
||||
.markdown-body li {
|
||||
list-style: unset;
|
||||
}
|
||||
|
||||
.markdown-body code,
|
||||
.markdown-body kbd,
|
||||
.markdown-body pre {
|
||||
|
|
|
@ -2,102 +2,71 @@
|
|||
|
||||
/* Tomorrow Comment */
|
||||
.hljs-comment,
|
||||
.hljs-title {
|
||||
.hljs-quote {
|
||||
color: #8e908c;
|
||||
}
|
||||
|
||||
/* Tomorrow Red */
|
||||
.hljs-variable,
|
||||
.hljs-attribute,
|
||||
.hljs-template-variable,
|
||||
.hljs-tag,
|
||||
.hljs-name,
|
||||
.hljs-selector-id,
|
||||
.hljs-selector-class,
|
||||
.hljs-regexp,
|
||||
.ruby .hljs-constant,
|
||||
.xml .hljs-tag .hljs-title,
|
||||
.xml .hljs-pi,
|
||||
.xml .hljs-doctype,
|
||||
.html .hljs-doctype,
|
||||
.css .hljs-id,
|
||||
.css .hljs-class,
|
||||
.css .hljs-pseudo {
|
||||
.hljs-deletion {
|
||||
color: #c82829;
|
||||
}
|
||||
|
||||
/* Tomorrow Orange */
|
||||
.hljs-number,
|
||||
.hljs-preprocessor,
|
||||
.hljs-pragma,
|
||||
.hljs-built_in,
|
||||
.hljs-builtin-name,
|
||||
.hljs-literal,
|
||||
.hljs-type,
|
||||
.hljs-params,
|
||||
.hljs-constant {
|
||||
.hljs-meta,
|
||||
.hljs-link {
|
||||
color: #f5871f;
|
||||
}
|
||||
|
||||
/* Tomorrow Yellow */
|
||||
.ruby .hljs-class .hljs-title,
|
||||
.css .hljs-rules .hljs-attribute {
|
||||
.hljs-attribute {
|
||||
color: #eab700;
|
||||
}
|
||||
|
||||
/* Tomorrow Green */
|
||||
.hljs-string,
|
||||
.hljs-value,
|
||||
.hljs-inheritance,
|
||||
.hljs-header,
|
||||
.ruby .hljs-symbol,
|
||||
.xml .hljs-cdata {
|
||||
.hljs-symbol,
|
||||
.hljs-bullet,
|
||||
.hljs-addition {
|
||||
color: #718c00;
|
||||
}
|
||||
|
||||
/* Tomorrow Aqua */
|
||||
.css .hljs-hexcolor {
|
||||
color: #3e999f;
|
||||
}
|
||||
|
||||
/* Tomorrow Blue */
|
||||
.hljs-function,
|
||||
.python .hljs-decorator,
|
||||
.python .hljs-title,
|
||||
.ruby .hljs-function .hljs-title,
|
||||
.ruby .hljs-title .hljs-keyword,
|
||||
.perl .hljs-sub,
|
||||
.javascript .hljs-title,
|
||||
.coffeescript .hljs-title {
|
||||
.hljs-title,
|
||||
.hljs-section {
|
||||
color: #4271ae;
|
||||
}
|
||||
|
||||
/* Tomorrow Purple */
|
||||
.hljs-keyword,
|
||||
.javascript .hljs-function {
|
||||
.hljs-selector-tag {
|
||||
color: #8959a8;
|
||||
}
|
||||
|
||||
.hljs {
|
||||
display: block;
|
||||
overflow-x: auto;
|
||||
background: white;
|
||||
color: #4d4d4c;
|
||||
padding: 0.5em;
|
||||
}
|
||||
|
||||
.hljs-line-numbers {
|
||||
text-align: right;
|
||||
border-right: 1px solid #ccc;
|
||||
margin-right: 5px;
|
||||
color: #999;
|
||||
-webkit-touch-callout: none;
|
||||
-webkit-user-select: none;
|
||||
-khtml-user-select: none;
|
||||
-moz-user-select: none;
|
||||
-ms-user-select: none;
|
||||
user-select: none;
|
||||
.hljs-emphasis {
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.coffeescript .javascript,
|
||||
.javascript .xml,
|
||||
.tex .hljs-formula,
|
||||
.xml .javascript,
|
||||
.xml .vbscript,
|
||||
.xml .css,
|
||||
.xml .hljs-cdata {
|
||||
opacity: 0.5;
|
||||
}
|
||||
.hljs-strong {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
|
|
@ -264,6 +264,24 @@ body {
|
|||
margin: 0;
|
||||
}
|
||||
|
||||
#access_key {
|
||||
min-width: 100%;
|
||||
line-height: 1.3em;
|
||||
}
|
||||
|
||||
#access_key input, span {
|
||||
vertical-align: middle;
|
||||
}
|
||||
|
||||
#access_key_checkbox {
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
#access_key_input {
|
||||
padding: 0;
|
||||
display: none;
|
||||
}
|
||||
|
||||
.oopscontent {
|
||||
width: 400px;
|
||||
}
|
||||
|
@ -330,6 +348,7 @@ body {
|
|||
|
||||
.display-video {
|
||||
width: 800px;
|
||||
max-height: 70vh;
|
||||
}
|
||||
|
||||
.scrollable {
|
||||
|
|
Binary image file not shown (size: 18 KiB before, 19 KiB after).
File diff suppressed because one or more lines are too long.
|
@ -1,11 +1,28 @@
|
|||
// @license magnet:?xt=urn:btih:1f739d935676111cfff4b4693e3816e664797050&dn=gpl-3.0.txt GPL-v3-or-Later
|
||||
|
||||
Dropzone.options.dropzone = {
|
||||
init: function() {
|
||||
init: function () {
|
||||
var dzone = document.getElementById("dzone");
|
||||
dzone.style.display = "block";
|
||||
},
|
||||
addedfile: function(file) {
|
||||
addedfile: function (file) {
|
||||
if (!this.options.autoProcessQueue) {
|
||||
var dropzone = this;
|
||||
var xhr = new XMLHttpRequest();
|
||||
xhr.onload = function () {
|
||||
if (xhr.readyState !== XMLHttpRequest.DONE) {
|
||||
return;
|
||||
}
|
||||
if (xhr.status < 400) {
|
||||
dropzone.processQueue()
|
||||
dropzone.options.autoProcessQueue = true;
|
||||
} else {
|
||||
dropzone.cancelUpload(file)
|
||||
}
|
||||
};
|
||||
xhr.open("HEAD", "auth", true);
|
||||
xhr.send()
|
||||
}
|
||||
var upload = document.createElement("div");
|
||||
upload.className = "upload";
|
||||
|
||||
|
@ -22,7 +39,7 @@ Dropzone.options.dropzone = {
|
|||
var cancelAction = document.createElement("span");
|
||||
cancelAction.className = "cancel";
|
||||
cancelAction.innerHTML = "Cancel";
|
||||
cancelAction.addEventListener('click', function(ev) {
|
||||
cancelAction.addEventListener('click', function (ev) {
|
||||
this.removeFile(file);
|
||||
}.bind(this));
|
||||
file.cancelActionElement = cancelAction;
|
||||
|
@ -36,19 +53,19 @@ Dropzone.options.dropzone = {
|
|||
|
||||
document.getElementById("uploads").appendChild(upload);
|
||||
},
|
||||
uploadprogress: function(file, p, bytesSent) {
|
||||
uploadprogress: function (file, p, bytesSent) {
|
||||
p = parseInt(p);
|
||||
file.progressElement.innerHTML = p + "%";
|
||||
file.uploadElement.setAttribute("style", 'background-image: -webkit-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -moz-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -ms-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: -o-linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%); background-image: linear-gradient(left, #F2F4F7 ' + p + '%, #E2E2E2 ' + p + '%)');
|
||||
},
|
||||
sending: function(file, xhr, formData) {
|
||||
sending: function (file, xhr, formData) {
|
||||
var randomize = document.getElementById("randomize");
|
||||
if(randomize != null) {
|
||||
if (randomize != null) {
|
||||
formData.append("randomize", randomize.checked);
|
||||
}
|
||||
formData.append("expires", document.getElementById("expires").value);
|
||||
},
|
||||
success: function(file, resp) {
|
||||
success: function (file, resp) {
|
||||
file.fileActions.removeChild(file.progressElement);
|
||||
|
||||
var fileLabelLink = document.createElement("a");
|
||||
|
@ -62,11 +79,11 @@ Dropzone.options.dropzone = {
|
|||
var deleteAction = document.createElement("span");
|
||||
deleteAction.innerHTML = "Delete";
|
||||
deleteAction.className = "cancel";
|
||||
deleteAction.addEventListener('click', function(ev) {
|
||||
deleteAction.addEventListener('click', function (ev) {
|
||||
xhr = new XMLHttpRequest();
|
||||
xhr.open("DELETE", resp.url, true);
|
||||
xhr.setRequestHeader("Linx-Delete-Key", resp.delete_key);
|
||||
xhr.onreadystatechange = function(file) {
|
||||
xhr.onreadystatechange = function (file) {
|
||||
if (xhr.readyState == 4 && xhr.status === 200) {
|
||||
var text = document.createTextNode("Deleted ");
|
||||
file.fileLabel.insertBefore(text, file.fileLabelLink);
|
||||
|
@ -80,12 +97,15 @@ Dropzone.options.dropzone = {
|
|||
file.cancelActionElement = deleteAction;
|
||||
file.fileActions.appendChild(deleteAction);
|
||||
},
|
||||
error: function(file, resp, xhrO) {
|
||||
canceled: function (file) {
|
||||
this.options.error(file);
|
||||
},
|
||||
error: function (file, resp, xhrO) {
|
||||
file.fileActions.removeChild(file.cancelActionElement);
|
||||
file.fileActions.removeChild(file.progressElement);
|
||||
|
||||
if (file.status === "canceled") {
|
||||
file.fileLabel.innerHTML = file.name + ": Canceled ";
|
||||
file.fileLabel.innerHTML = file.name + ": Canceled ";
|
||||
}
|
||||
else {
|
||||
if (resp.error) {
|
||||
|
@ -101,15 +121,16 @@ Dropzone.options.dropzone = {
|
|||
file.fileLabel.className = "error";
|
||||
},
|
||||
|
||||
autoProcessQueue: document.getElementById("dropzone").getAttribute("data-auth") !== "basic",
|
||||
maxFilesize: Math.round(parseInt(document.getElementById("dropzone").getAttribute("data-maxsize"), 10) / 1024 / 1024),
|
||||
previewsContainer: "#uploads",
|
||||
parallelUploads: 5,
|
||||
headers: {"Accept": "application/json"},
|
||||
headers: { "Accept": "application/json" },
|
||||
dictDefaultMessage: "Click or Drop file(s) or Paste image",
|
||||
dictFallbackMessage: ""
|
||||
};
|
||||
|
||||
document.onpaste = function(event) {
|
||||
document.onpaste = function (event) {
|
||||
var items = (event.clipboardData || event.originalEvent.clipboardData).items;
|
||||
for (index in items) {
|
||||
var item = items[index];
|
||||
|
@ -119,4 +140,15 @@ document.onpaste = function(event) {
|
|||
}
|
||||
};
|
||||
|
||||
document.getElementById("access_key_checkbox").onchange = function (event) {
|
||||
if (event.target.checked) {
|
||||
document.getElementById("access_key_input").style.display = "inline-block";
|
||||
document.getElementById("access_key_text").style.display = "none";
|
||||
} else {
|
||||
document.getElementById("access_key_input").value = "";
|
||||
document.getElementById("access_key_input").style.display = "none";
|
||||
document.getElementById("access_key_text").style.display = "inline-block";
|
||||
}
|
||||
};
|
||||
|
||||
// @end-license
|
||||
|
|
templates.go (16 changes)
@@ -8,7 +8,7 @@ import (
    "path/filepath"
    "strings"

    "github.com/GeertJohan/go.rice"
    rice "github.com/GeertJohan/go.rice"
    "github.com/flosch/pongo2"
)

@@ -51,6 +51,8 @@ func populateTemplatesMap(tSet *pongo2.TemplateSet, tMap map[string]*pongo2.Temp
    "401.html",
    "404.html",
    "oops.html",
    "access.html",
    "custom_page.html",

    "display/audio.html",
    "display/image.html",

@@ -84,7 +86,17 @@ func renderTemplate(tpl *pongo2.Template, context pongo2.Context, r *http.Reques

    context["sitepath"] = Config.sitePath
    context["selifpath"] = Config.selifPath
    context["using_auth"] = Config.authFile != ""
    context["custom_pages_names"] = customPagesNames

    var a string
    if Config.authFile == "" {
        a = "none"
    } else if Config.basicAuth {
        a = "basic"
    } else {
        a = "header"
    }
    context["auth"] = a

    return tpl.ExecuteWriter(context, writer)
}
|
@ -1,7 +1,9 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - 404 Not Found{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div class="error-404">
|
||||
<a href="{{ sitepath }}"><img src='{{ sitepath }}static/images/404.jpg'></a>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
|
@ -1,62 +1,71 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - API{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<link href="{{ sitepath }}static/css/github-markdown.css" rel="stylesheet" type="text/css">
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main">
|
||||
<div id='inner_content'>
|
||||
<div id='inner_content'>
|
||||
<div class="normal markdown-body">
|
||||
|
||||
<h2>API</h2>
|
||||
|
||||
<h3>Client</h3>
|
||||
<p>To simplify uploading and deleting files, you can use <a target="_blank" href="https://github.com/andreimarcu/linx-client">linx-client</a>, which uses this API.</p>
|
||||
<p>To simplify uploading and deleting files, you can use <a target="_blank"
|
||||
href="https://github.com/andreimarcu/linx-client">linx-client</a>, which uses this API.</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<h3>Keys</h3>
|
||||
<p>This instance uses API Keys, therefore you will need to provide a key for uploading and deleting files.<br/> To do so, add the <code>Linx-Api-Key</code> header with your key.</p>
|
||||
<p>This instance uses API Keys, therefore you will need to provide a key for uploading and deleting
|
||||
files.<br /> To do so, add the <code>Linx-Api-Key</code> header with your key.</p>
|
||||
{% endif %}
|
||||
|
||||
<h3>Uploading a file</h3>
|
||||
|
||||
<p>To upload a file, make a PUT request to <code>{{ siteurl }}upload/</code> and you will get the url of your upload back.</p>
|
||||
<p>To upload a file, make a PUT request to <code>{{ siteurl }}upload/</code> and you will get the url of
|
||||
your upload back.</p>
|
||||
|
||||
<p><strong>Optional headers with the request</strong></p>
|
||||
|
||||
{% if not forcerandom %}
|
||||
<p>Randomize the filename<br/>
|
||||
<code>Linx-Randomize: yes</code></p>
|
||||
{% endif %}
|
||||
{% if not forcerandom %}
|
||||
<p>Randomize the filename<br />
|
||||
<code>Linx-Randomize: yes</code></p>
|
||||
{% endif %}
|
||||
|
||||
<p>Specify a custom deletion key<br/>
|
||||
<code>Linx-Delete-Key: mysecret</code></p>
|
||||
<p>Specify a custom deletion key<br />
|
||||
<code>Linx-Delete-Key: mysecret</code></p>
|
||||
|
||||
<p>Specify an expiration time (in seconds)<br/>
|
||||
<code>Linx-Expiry: 60</code></p>
|
||||
<p>Protect file with password<br />
|
||||
<code>Linx-Access-Key: mysecret</code></p>
|
||||
|
||||
<p>Get a json response<br/>
|
||||
<code>Accept: application/json</code></p>
|
||||
<p>Specify an expiration time (in seconds)<br />
|
||||
<code>Linx-Expiry: 60</code></p>
|
||||
|
||||
<p>Get a json response<br />
|
||||
<code>Accept: application/json</code></p>
|
||||
|
||||
<p>The json response will then contain:</p>
|
||||
|
||||
<blockquote>
|
||||
<p>“url”: the publicly available upload url<br/>
|
||||
“direct_url”: the url to access the file directly<br/>
|
||||
“filename”: the (optionally generated) filename<br/>
|
||||
“delete_key”: the (optionally generated) deletion key,<br/>
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
|
||||
“size”: the size in bytes of the file<br/>
|
||||
“mimetype”: the guessed mimetype of the file<br/>
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
<p>“url”: the publicly available upload url<br />
|
||||
“direct_url”: the url to access the file directly<br />
|
||||
“filename”: the (optionally generated) filename<br />
|
||||
“delete_key”: the (optionally generated) deletion key,<br />
|
||||
“access_key”: the (optionally supplied) access key,<br />
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br />
|
||||
“size”: the size in bytes of the file<br />
|
||||
“mimetype”: the guessed mimetype of the file<br />
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
</blockquote>
|
||||
|
||||
<p><strong>Examples</strong></p>
|
||||
|
||||
<p>Uploading myphoto.jpg</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}7z4h4ut.jpg{% endif %}</code></pre>
|
||||
{% else %}
|
||||
|
@ -66,7 +75,7 @@
|
|||
|
||||
<p>Uploading myphoto.jpg with an expiry of 20 minutes</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Expiry: 1200" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}{% if not forcerandom %}myphoto.jpg{% else %}jm295snf.jpg{% endif %}</code></pre>
|
||||
{% else %}
|
||||
|
@ -76,7 +85,7 @@
|
|||
|
||||
<p>Uploading myphoto.jpg with a random filename and getting a json response:</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Accept: application/json"{% if not forcerandom %} -H "Linx-Randomize: yes"{% endif %} -T myphoto.jpg {{ siteurl }}upload/
|
||||
{"delete_key":"...","expiry":"0","filename":"f34h4iu.jpg","mimetype":"image/jpeg",
|
||||
"sha256sum":"...","size":"...","url":"{{ siteurl }}f34h4iu.jpg"}</code></pre>
|
||||
|
@ -88,13 +97,14 @@
|
|||
|
||||
<h3>Overwriting a file</h3>
|
||||
|
||||
<p>To overwrite a file you uploaded, simply provide the <code>Linx-Delete-Key</code> header with the original file's deletion key.</p>
|
||||
<p>To overwrite a file you uploaded, simply provide the <code>Linx-Delete-Key</code> header with the
|
||||
original file's deletion key.</p>
|
||||
|
||||
<p><strong>Example</strong></p>
|
||||
|
||||
<p>To overwrite myphoto.jpg</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Delete-Key: mysecret" -T myphoto.jpg {{ siteurl }}upload/
|
||||
{{ siteurl }}myphoto.jpg</code></pre>
|
||||
{% else %}
|
||||
|
@ -104,13 +114,14 @@
|
|||
|
||||
<h3>Deleting a file</h3>
|
||||
|
||||
<p>To delete a file you uploaded, make a DELETE request to <code>{{ siteurl }}yourfile.ext</code> with the delete key set as the <code>Linx-Delete-Key</code> header.</p>
|
||||
<p>To delete a file you uploaded, make a DELETE request to <code>{{ siteurl }}yourfile.ext</code> with the
|
||||
delete key set as the <code>Linx-Delete-Key</code> header.</p>
|
||||
|
||||
<p><strong>Example</strong></p>
|
||||
|
||||
<p>To delete myphoto.jpg</p>
|
||||
|
||||
{% if using_auth %}
|
||||
{% if auth != "none" %}
|
||||
<pre><code>$ curl -H "Linx-Api-Key: mysecretkey" -H "Linx-Delete-Key: mysecret" -X DELETE {{ siteurl }}myphoto.jpg
|
||||
DELETED</code></pre>
|
||||
{% else %}
|
||||
|
@ -120,16 +131,17 @@ DELETED</code></pre>
|
|||
|
||||
<h3>Information about a file</h3>
|
||||
|
||||
<p>To retrieve information about a file, make a GET request the public url with <code>Accept: application/json</code> headers and you will receive a json response containing:</p>
|
||||
<p>To retrieve information about a file, make a GET request the public url with
|
||||
<code>Accept: application/json</code> headers and you will receive a json response containing:</p>
|
||||
|
||||
<blockquote>
|
||||
<p>“url”: the publicly available upload url<br/>
|
||||
“direct_url”: the url to access the file directly<br/>
|
||||
“filename”: the (optionally generated) filename<br/>
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br/>
|
||||
“size”: the size in bytes of the file<br/>
|
||||
“mimetype”: the guessed mimetype of the file<br/>
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
<p>“url”: the publicly available upload url<br />
|
||||
“direct_url”: the url to access the file directly<br />
|
||||
“filename”: the (optionally generated) filename<br />
|
||||
“expiry”: the unix timestamp at which the file will expire (0 if never)<br />
|
||||
“size”: the size in bytes of the file<br />
|
||||
“mimetype”: the guessed mimetype of the file<br />
|
||||
“sha256sum”: the sha256sum of the file,</p>
|
||||
</blockquote>
|
||||
|
||||
<p><strong>Example</strong></p>
|
||||
|
@ -137,6 +149,6 @@ DELETED</code></pre>
|
|||
<pre><code>$ curl -H "Accept: application/json" {{ siteurl }}myphoto.jpg
|
||||
{"expiry":"0","filename":"myphoto.jpg","mimetype":"image/jpeg","sha256sum":"...","size":"..."}</code></pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
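The API documented above is simple enough to drive without linx-client. A rough Go sketch, with placeholder URL, filename, and key, assuming only the behavior described in the page (curl's -T appends the filename to the upload/ path, the headers are optional, and the JSON fields come back as strings):

package main

import (
    "encoding/json"
    "fmt"
    "net/http"
    "os"
)

func main() {
    f, err := os.Open("myphoto.jpg") // placeholder file
    if err != nil {
        panic(err)
    }
    defer f.Close()

    req, _ := http.NewRequest("PUT", "https://mylinx.example.org/upload/myphoto.jpg", f)
    req.Header.Set("Linx-Api-Key", "mysecretkey") // only needed when the instance uses API keys
    req.Header.Set("Linx-Expiry", "1200")         // expire after 20 minutes
    req.Header.Set("Accept", "application/json")

    resp, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    defer resp.Body.Close()

    var out map[string]string
    if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
        panic(err)
    }
    fmt.Println(out["url"], out["delete_key"])
}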
14
templates/access.html
Normal file
14
templates/access.html
Normal file
|
@ -0,0 +1,14 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - Password protected file{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main" class="oopscontent">
|
||||
<form action="{{ unlockpath }}" method="POST" enctype="multipart/form-data">
|
||||
{{ filename }} is protected with a password: <br /><br />
|
||||
<input name="access_key" type="password" />
|
||||
<input id="submitbtn" type="submit" value="Unlock">
|
||||
<br /><br />
|
||||
</form>
|
||||
</div>
|
||||
{% endblock %}
|
|
@ -1,9 +1,10 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
|
||||
<head>
|
||||
<title>{% block title %}{{ sitename }}{% endblock %}</title>
|
||||
<meta charset='utf-8' content='text/html' http-equiv='content-type'>
|
||||
<meta name='viewport' content='width=device-width, initial-scale=1.0'>
|
||||
<meta name='viewport' content='width=device-width, initial-scale=0.8'>
|
||||
<link href='{{ sitepath }}static/css/linx.css?v=1' media='screen, projection' rel='stylesheet' type='text/css'>
|
||||
<link href='{{ sitepath }}static/css/hint.css' rel='stylesheet' type='text/css'>
|
||||
<link href='{{ sitepath }}static/images/favicon.gif' rel='icon' type='image/gif'>
|
||||
|
@ -15,22 +16,26 @@
|
|||
<div id="container">
|
||||
<div id="header">
|
||||
<div id="navigation" class="right">
|
||||
{% if !using_auth %}
|
||||
<a href="{{ sitepath }}">Upload</a> |
|
||||
<a href="{{ sitepath }}paste/">Paste</a> |
|
||||
{% if auth != "header" %}
|
||||
<a href="{{ sitepath }}">Upload</a> |
|
||||
<a href="{{ sitepath }}paste/">Paste</a> |
|
||||
{% endif %}
|
||||
<a href="{{ sitepath }}API/">API</a>
|
||||
{% for custom_file_name, custom_page_name in custom_pages_names sorted %}
|
||||
| <a href="{{ sitepath }}{{ custom_file_name }}/">{{ custom_page_name }}</a>
|
||||
{% endfor %}
|
||||
</div>
|
||||
<h2><a href="{{ sitepath }}" title="{{ sitename }}">{{ sitename }}</a></h2>
|
||||
<h2><a href="{{ sitepath }}" title="{{ sitename }}">{{ sitename }}</a></h2>
|
||||
</div>
|
||||
|
||||
{% block content %}{% endblock %}
|
||||
|
||||
<div id="footer">
|
||||
<a href="https://github.com/andreimarcu/linx-server">linx</a>
|
||||
<a href="https://github.com/andreimarcu/linx-server">linx-server</a>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
</body>
|
||||
</html>
|
||||
|
||||
</html>
|
19
templates/custom_page.html
Normal file
19
templates/custom_page.html
Normal file
|
@ -0,0 +1,19 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block title %}{{sitename}} - {{ pagename }}{% endblock %}
|
||||
|
||||
{% block head %}
|
||||
<link href="{{ sitepath }}static/css/github-markdown.css" rel="stylesheet" type="text/css">
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<div id="main">
|
||||
<div id='inner_content'>
|
||||
<div class="normal markdown-body">
|
||||
<h2>{{ pagename }}</h2>
|
||||
|
||||
{{ contents|safe }}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endblock %}
|
|
@ -1,9 +1,12 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<meta property="og:audio" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<audio class="display-audio" controls preload='auto'>
|
||||
<source src='{{ sitepath }}{{ selifpath }}{{ filename }}'>
|
||||
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
|
||||
</audio>
|
||||
{% endblock %}
|
||||
|
||||
{% endblock %}
|
|
@ -1,36 +1,36 @@
|
|||
{% extends "../base.html" %}
|
||||
|
||||
{% block title %}{{ filename }}{% endblock %}
|
||||
{% block title %}{{sitename}} - {{ filename }}{% endblock %}
|
||||
|
||||
{% block bodymore %}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
|
||||
<div id="info" class="dinfo info-flex">
|
||||
<div id="filename">
|
||||
{{ filename }}
|
||||
</div>
|
||||
|
||||
<div class="info-actions">
|
||||
{% if expiry %}
|
||||
<span>file expires in {{ expiry }}</span> |
|
||||
{% endif %}
|
||||
{% block infomore %}{% endblock %}
|
||||
<span>{{ size }}</span> |
|
||||
<a href="{{ filename }}/torrent" download>torrent</a> |
|
||||
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" download>get</a>
|
||||
</div>
|
||||
|
||||
{% block infoleft %}{% endblock %}
|
||||
<div id="info" class="dinfo info-flex">
|
||||
<div id="filename">
|
||||
{{ filename }}
|
||||
</div>
|
||||
|
||||
<div id="main" {% block mainmore %}{% endblock %}>
|
||||
|
||||
<div id='inner_content' {% block innercontentmore %}{% endblock %} >
|
||||
{% block main %}{% endblock %}
|
||||
</div>
|
||||
|
||||
<div class="info-actions">
|
||||
{% if expiry %}
|
||||
<span>file expires in {{ expiry }}</span> |
|
||||
{% endif %}
|
||||
{% block infomore %}{% endblock %}
|
||||
<span>{{ size }}</span> |
|
||||
<a href="{{ filename }}/torrent" download>torrent</a> |
|
||||
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}" download>get</a>
|
||||
</div>
|
||||
|
||||
<script src="{{ sitepath }}static/js/clipboard.js"></script>
|
||||
{% endblock %}
|
||||
{% block infoleft %}{% endblock %}
|
||||
</div>
|
||||
|
||||
<div id="main" {% block mainmore %}{% endblock %}>
|
||||
|
||||
<div id='inner_content' {% block innercontentmore %}{% endblock %}>
|
||||
{% block main %}{% endblock %}
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
<script src="{{ sitepath }}static/js/clipboard.js"></script>
|
||||
{% endblock %}
|
|
@ -1,7 +1,11 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<meta property="og:image" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<a href="{{ sitepath }}{{ selifpath }}{{ filename }}">
|
||||
<img class="display-image" src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
</a>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
|
@ -1,8 +1,12 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block head %}
|
||||
<meta property="og:video" content="{{ siteurl }}{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
{% endblock %}
|
||||
|
||||
{% block main %}
|
||||
<video class="display-video" controls autoplay>
|
||||
<source src="{{ sitepath }}{{ selifpath }}{{ filename }}"/>
|
||||
<source src="{{ sitepath }}{{ selifpath }}{{ filename }}" />
|
||||
<a href='{{ sitepath }}{{ selifpath }}{{ filename }}'>Download it instead</a>
|
||||
</video>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
|
@ -4,9 +4,10 @@
|
|||
<link href='{{ sitepath }}static/css/dropzone.css' media='screen, projection' rel='stylesheet' type='text/css'>
|
||||
{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
{% block content %}
|
||||
<div id="fileupload">
|
||||
<form action="{{ sitepath }}upload" class="dropzone" id="dropzone" method="POST" enctype="multipart/form-data" data-maxsize="{{ maxsize }}">
|
||||
<form action="{{ sitepath }}upload" class="dropzone" id="dropzone" method="POST" enctype="multipart/form-data"
|
||||
data-maxsize="{{ maxsize }}" data-auth="{{ auth }}">
|
||||
<div class="fallback">
|
||||
<input id="fileinput" name="file" type="file" /><br />
|
||||
<input id="submitbtn" type="submit" value="Upload">
|
||||
|
@ -17,16 +18,32 @@
|
|||
</div>
|
||||
|
||||
<div id="choices">
|
||||
<label>{% if not forcerandom %}<input name="randomize" id="randomize" type="checkbox" checked /> Randomize filename{% endif %}</label>
|
||||
<span class="hint--top hint--bounce"
|
||||
data-hint="Replace the filename with random characters. The file extension is retained">
|
||||
<label><input {% if forcerandom %} disabled {% endif %} name="randomize" id="randomize" type="checkbox"
|
||||
checked /> Randomize filename</label>
|
||||
</span>
|
||||
|
||||
<div id="expiry">
|
||||
<label>File expiry:
|
||||
<select name="expires" id="expires">
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<select name="expires" id="expires">
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}" {% if forloop.Last %} selected{% endif %}>
|
||||
{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
</label>
|
||||
</div>
|
||||
<div id="access_key">
|
||||
<span class="hint--top hint--bounce"
|
||||
data-hint="Require password to access (this does not encrypt the file but only limits access)">
|
||||
<label>
|
||||
<input type="checkbox" id="access_key_checkbox" />
|
||||
<span id="access_key_text">Require access password</span>
|
||||
</label>
|
||||
<input id="access_key_input" name="access_key" type="text" placeholder="Access password" />
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
<div class="clear"></div>
|
||||
</form>
|
||||
|
@ -36,4 +53,4 @@
|
|||
|
||||
<script src="{{ sitepath }}static/js/dropzone.js"></script>
|
||||
<script src="{{ sitepath }}static/js/upload.js"></script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
|
@ -1,29 +1,40 @@
|
|||
{% extends "base.html" %}
|
||||
|
||||
{% block content %}
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div id="main" class="paste">
|
||||
<div id="info" class="info-flex">
|
||||
<div>
|
||||
{% if not forcerandom %}<span class="hint--top hint--bounce" data-hint="Leave empty for random filename"><input class="codebox" name='filename' id="filename" type='text' value="" placeholder="filename" /></span>{% endif %}.<span class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
|
||||
</div>
|
||||
<div>
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled>Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}"{% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit">Paste</button>
|
||||
</div>
|
||||
</div>
|
||||
{% block title %}{{sitename}} - Paste{% endblock %}
|
||||
|
||||
<div id="inner_content" class="padme">
|
||||
<textarea name='content' id="content" class="editor"></textarea>
|
||||
{% block content %}
|
||||
<form id="reply" action='{{ sitepath }}upload' method='post'>
|
||||
<div id="main" class="paste">
|
||||
<div id="info" class="info-flex">
|
||||
<div>
|
||||
{% if not forcerandom %}<span class="hint--top hint--bounce"
|
||||
data-hint="Leave empty for random filename"><input class="codebox" name='filename' id="filename"
|
||||
type='text' value="" placeholder="filename" /></span>{% endif %}.<span
|
||||
class="hint--top hint--bounce" data-hint="Enable syntax highlighting by adding the extension"><input
|
||||
id="extension" class="codebox" name='extension' type='text' value="" placeholder="txt" /></span>
|
||||
</div>
|
||||
<div>
|
||||
<span class="hint--top hint--bounce" data-hint="Require password to access (leave empty to disable)">
|
||||
<input class="codebox" name="access_key" type="text" placeholder="password" />
|
||||
</span>
|
||||
|
||||
<select id="expiry" name="expires">
|
||||
<option disabled>Expires:</option>
|
||||
{% for expiry in expirylist %}
|
||||
<option value="{{ expiry.Seconds }}" {% if forloop.Last %} selected{% endif %}>{{ expiry.Human }}
|
||||
</option>
|
||||
{% endfor %}
|
||||
</select>
|
||||
<button type="submit">Paste</button>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<div id="inner_content" class="padme">
|
||||
<textarea name='content' id="content" class="editor"></textarea>
|
||||
</div>
|
||||
</div>
|
||||
</form>
|
||||
|
||||
<script src="{{ sitepath }}static/js/util.js"></script>
|
||||
<script src="{{ sitepath }}static/js/paste.js"></script>
|
||||
{% endblock %}
|
||||
{% endblock %}
|
|
@@ -68,7 +68,7 @@ func TestCreateTorrentWithImage(t *testing.T) {

    bencode.DecodeBytes(encoded, &decoded)

    if decoded.Info.Pieces != "r\x01\x80j\x99\x84\n\xd3dZ;1NX\xec;\x9d$+f" {
    if decoded.Info.Pieces != "\xd6\xff\xbf'^)\x85?\xb4.\xb0\xc1|\xa3\x83\xeeX\xf9\xfd\xd7" {
        t.Fatal("Torrent pieces did not match expected pieces for image")
    }
}
upload.go (39 changes)
@@ -15,11 +15,12 @@ import (
    "strings"
    "time"

    "github.com/andreimarcu/linx-server/auth/apikeys"
    "github.com/andreimarcu/linx-server/backends"
    "github.com/andreimarcu/linx-server/expiry"
    "github.com/dchest/uniuri"
    "github.com/gabriel-vasile/mimetype"
    "github.com/zenazn/goji/web"
    "gopkg.in/h2non/filetype.v1"
)

var FileTooLargeError = errors.New("File too large.")

@@ -40,6 +41,7 @@ type UploadRequest struct {
    expiry         time.Duration // Seconds until expiry, 0 = never
    deleteKey      string        // Empty string if not defined
    randomBarename bool
    accessKey      string        // Empty string if not defined
}

// Metadata associated with a file as it would actually be stored

@@ -88,6 +90,7 @@ func uploadPostHandler(c web.C, w http.ResponseWriter, r *http.Request) {
    }

    upReq.expiry = parseExpiry(r.PostFormValue("expires"))
    upReq.accessKey = r.PostFormValue(accessKeyParamName)

    if r.PostFormValue("randomize") == "true" {
        upReq.randomBarename = true

@@ -157,8 +160,22 @@ func uploadPutHandler(c web.C, w http.ResponseWriter, r *http.Request) {

func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
    if Config.remoteAuthFile != "" {
        result, err := checkAuth(remoteAuthKeys, r.FormValue("key"))
        key := r.FormValue("key")
        if key == "" && Config.basicAuth {
            _, password, ok := r.BasicAuth()
            if ok {
                key = password
            }
        }
        result, err := apikeys.CheckAuth(remoteAuthKeys, key)
        if err != nil || !result {
            if Config.basicAuth {
                rs := ""
                if Config.siteName != "" {
                    rs = fmt.Sprintf(` realm="%s"`, Config.siteName)
                }
                w.Header().Set("WWW-Authenticate", `Basic`+rs)
            }
            unauthorizedHandler(c, w, r)
            return
        }

@@ -171,6 +188,7 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {

    upReq := UploadRequest{}
    grabUrl, _ := url.Parse(r.FormValue("url"))
    directURL := r.FormValue("direct_url") == "yes"

    resp, err := http.Get(grabUrl.String())
    if err != nil {

@@ -181,6 +199,7 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
    upReq.filename = filepath.Base(grabUrl.Path)
    upReq.src = http.MaxBytesReader(w, resp.Body, Config.maxSize)
    upReq.deleteKey = r.FormValue("deletekey")
    upReq.accessKey = r.FormValue(accessKeyParamName)
    upReq.randomBarename = r.FormValue("randomize") == "yes"
    upReq.expiry = parseExpiry(r.FormValue("expiry"))

@@ -201,7 +220,11 @@ func uploadRemote(c web.C, w http.ResponseWriter, r *http.Request) {
        return
    }

    http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
    if directURL {
        http.Redirect(w, r, Config.sitePath+Config.selifPath+upload.Filename, 303)
    } else {
        http.Redirect(w, r, Config.sitePath+upload.Filename, 303)
    }
    }
}

@@ -211,6 +234,7 @@ func uploadHeaderProcess(r *http.Request, upReq *UploadRequest) {
    }

    upReq.deleteKey = r.Header.Get("Linx-Delete-Key")
    upReq.accessKey = r.Header.Get(accessKeyHeaderName)

    // Get seconds until expiry. Non-integer responses never expire.
    expStr := r.Header.Get("Linx-Expiry")

@@ -243,11 +267,11 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
    header = header[:n]

    // Determine the type of file from header
    kind, err := filetype.Match(header)
    if err != nil || kind.Extension == "unknown" {
    kind := mimetype.Detect(header)
    if len(kind.Extension()) < 2 {
        extension = "file"
    } else {
        extension = kind.Extension
        extension = kind.Extension()[1:] // remove leading "."
    }
}

@@ -310,7 +334,7 @@ func processUpload(upReq UploadRequest) (upload Upload, err error) {
        upReq.deleteKey = uniuri.NewLen(30)
    }

    upload.Metadata, err = storageBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src), fileExpiry, upReq.deleteKey)
    upload.Metadata, err = storageBackend.Put(upload.Filename, io.MultiReader(bytes.NewReader(header), upReq.src), fileExpiry, upReq.deleteKey, upReq.accessKey)
    if err != nil {
        return upload, err
    }

@@ -328,6 +352,7 @@ func generateJSONresponse(upload Upload, r *http.Request) []byte {
    "direct_url": getSiteURL(r) + Config.selifPath + upload.Filename,
    "filename":   upload.Filename,
    "delete_key": upload.Metadata.DeleteKey,
    "access_key": upload.Metadata.AccessKey,
    "expiry":     strconv.FormatInt(upload.Metadata.Expiry.Unix(), 10),
    "size":       strconv.FormatInt(upload.Metadata.Size, 10),
    "mimetype":   upload.Metadata.Mimetype,
143
util.go
143
util.go
|
@ -1,15 +1,10 @@
|
|||
package main
|
||||
|
||||
func extensionToHlAndAceLangs(extension string) (hlExt, aceExt string) {
|
||||
func extensionToHlLang(extension string) (hlExt string) {
|
||||
hlExt, exists := extensionToHl[extension]
|
||||
if !exists {
|
||||
hlExt = "text"
|
||||
}
|
||||
|
||||
aceExt, exists = extensionToAce[extension]
|
||||
if !exists {
|
||||
aceExt = "text"
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -18,83 +13,63 @@ func supportedBinExtension(extension string) bool {
|
|||
return exists
|
||||
}
|
||||
|
||||
var extensionToAce = map[string]string{
|
||||
"c": "c_cpp",
|
||||
"h": "c_cpp",
|
||||
"cpp": "c_cpp",
|
||||
"clj": "clojure",
|
||||
"coffee": "coffee",
|
||||
"cfc": "coldfusion",
|
||||
"cs": "csharp",
|
||||
"sh": "sh",
|
||||
"bash": "sh",
|
||||
"css": "css",
|
||||
"go": "golang",
|
||||
"diff": "diff",
|
||||
"html": "html",
|
||||
"xml": "xml",
|
||||
"ini": "ini",
|
||||
"java": "java",
|
||||
"js": "javascript",
|
||||
"json": "json",
|
||||
"jsp": "jsp",
|
||||
"tex": "latex",
|
||||
"lisp": "lisp",
|
||||
"less": "less",
|
||||
"lua": "lua",
|
||||
"md": "markdown",
|
||||
"ocaml": "ocaml",
|
||||
"tcl": "tcl",
|
||||
"yaml": "yaml",
|
||||
"php": "php",
|
||||
"pl": "perl",
|
||||
"py": "python",
|
||||
"rb": "ruby",
|
||||
"sql": "sql",
|
||||
"apache": "apache",
|
||||
"cmake": "cmake",
|
||||
"bat": "dos",
|
||||
"scala": "scala",
|
||||
"txt": "text",
|
||||
}
|
||||
|
||||
var extensionToHl = map[string]string{
|
||||
"c": "cpp",
|
||||
"h": "cpp",
|
||||
"cpp": "c_cpp",
|
||||
"clj": "clojure",
|
||||
"coffee": "coffee",
|
||||
"cfc": "coldfusion",
|
||||
"cs": "csharp",
|
||||
"sh": "sh",
|
||||
"bash": "sh",
|
||||
"css": "css",
|
||||
"go": "go",
|
||||
"diff": "diff",
|
||||
"html": "html",
|
||||
"htm": "html",
|
||||
"ini": "ini",
|
||||
"java": "java",
|
||||
"js": "javascript",
|
||||
"json": "json",
|
||||
"jsp": "jsp",
|
||||
"tex": "latex",
|
||||
"lisp": "lisp",
|
||||
"less": "less",
|
||||
"lua": "lua",
|
||||
"ocaml": "ocaml",
|
||||
"tcl": "tcl",
|
||||
"nginx": "nginx",
|
||||
"xml": "xml",
|
||||
"yaml": "yaml",
|
||||
"php": "php",
|
||||
"pl": "perl",
|
||||
"py": "python",
|
||||
"rb": "ruby",
|
||||
"sql": "sql",
|
||||
"apache": "apache",
|
||||
"cmake": "cmake",
|
||||
"bat": "dos",
|
||||
"scala": "scala",
|
||||
"txt": "text",
|
||||
"ahk": "autohotkey",
|
||||
"apache": "apache",
|
||||
"applescript": "applescript",
|
||||
"bas": "basic",
|
||||
"bash": "sh",
|
||||
"bat": "dos",
|
||||
"c": "cpp",
|
||||
"cfc": "coldfusion",
|
||||
"clj": "clojure",
|
||||
"cmake": "cmake",
|
||||
"coffee": "coffee",
|
||||
"cpp": "c_cpp",
|
||||
"cs": "csharp",
|
||||
"css": "css",
|
||||
"d": "d",
|
||||
"dart": "dart",
|
||||
"diff": "diff",
|
||||
"dockerfile": "dockerfile",
|
||||
"elm": "elm",
|
||||
"erl": "erlang",
|
||||
"for": "fortran",
|
||||
"go": "go",
|
||||
"h": "cpp",
|
||||
"htm": "html",
|
||||
"html": "html",
|
||||
"ini": "ini",
|
||||
"java": "java",
|
||||
"js": "javascript",
|
||||
"json": "json",
|
||||
"jsp": "jsp",
|
||||
"kt": "kotlin",
|
||||
"less": "less",
|
||||
"lisp": "lisp",
|
||||
"lua": "lua",
|
||||
"m": "objectivec",
|
||||
"nginx": "nginx",
|
||||
"ocaml": "ocaml",
|
||||
"php": "php",
|
||||
"pl": "perl",
|
||||
"proto": "protobuf",
|
||||
"ps": "powershell",
|
||||
"py": "python",
|
||||
"rb": "ruby",
|
||||
"rs": "rust",
|
||||
"scala": "scala",
|
||||
"scm": "scheme",
|
||||
"scpt": "applescript",
|
||||
"scss": "scss",
|
||||
"sh": "sh",
|
||||
"sql": "sql",
|
||||
"tcl": "tcl",
|
||||
"tex": "latex",
|
||||
"toml": "ini",
|
||||
"ts": "typescript",
|
||||
"txt": "text",
|
||||
"xml": "xml",
|
||||
"yaml": "yaml",
|
||||
"yml": "yaml",
|
||||
}
|
||||
|
|