Merge pull request #420 from toptal/convert-hastebin-server-to-typescript

Convert haste-server to typescript
Yusuf Yilmaz 2022-06-09 13:13:14 +02:00 committed by GitHub
commit 046a213e41
70 changed files with 8041 additions and 2940 deletions


@ -1,2 +1,2 @@
 **/*.min.js
-config.js
+config

53
.eslintrc.js Normal file

@ -0,0 +1,53 @@
module.exports = {
env: {
node: true
},
extends: [
'eslint:recommended',
'plugin:@typescript-eslint/recommended',
'airbnb-base',
'airbnb-typescript/base',
'plugin:import/errors',
'plugin:import/warnings',
'plugin:import/typescript',
'prettier'
],
plugins: ['import', '@typescript-eslint'],
settings: {
'import/parsers': {
'@typescript-eslint/parser': ['.ts']
},
'import/resolver': {
node: {
extensions: ['.js', '.ts'],
moduleDirectory: ['node_modules', 'src/']
},
typescript: {
alwaysTryTypes: true,
project: '.'
}
}
},
overrides: [
{
env: {
jest: true
},
files: ['**/__tests__/**/*.[jt]s', '**/?(*.)+(spec|test).[jt]s'],
extends: ['plugin:jest/recommended'],
rules: {
'import/no-extraneous-dependencies': [
'off',
{ devDependencies: ['**/?(*.)+(spec|test).[jt]s'] }
],
camelcase: ['off']
}
}
],
ignorePatterns: ['**/*.js', 'node_modules', 'dist'],
parserOptions: {
root: true,
tsconfigRootDir: __dirname,
project: ['./tsconfig.json']
}
}


@ -1,25 +0,0 @@
{
"env": {
"es6": true,
"node": true
},
"extends": "eslint:recommended",
"rules": {
"indent": [
"error",
2
],
"linebreak-style": [
"error",
"unix"
],
"quotes": [
"error",
"single"
],
"semi": [
"error",
"always"
]
}
}


@ -2,7 +2,7 @@ name: Close inactive issues and PRs
 on:
 workflow_dispatch:
 schedule:
-- cron: "30 1 * * *"
+- cron: '30 1 * * *'
 jobs:
 close-stale:
@ -15,16 +15,16 @@ jobs:
 with:
 days-before-stale: 30
 days-before-close: 14
-stale-issue-label: "stale"
+stale-issue-label: 'stale'
-stale-pr-label: "stale"
+stale-pr-label: 'stale'
 exempt-issue-labels: backlog,triage,nostale
 exempt-pr-labels: backlog,triage,nostale
-stale-pr-message: "This PR is stale because it has been open for 30 days with no activity."
+stale-pr-message: 'This PR is stale because it has been open for 30 days with no activity.'
-close-pr-message: "This PR was closed because it has been inactive for 14 days since being marked as stale."
+close-pr-message: 'This PR was closed because it has been inactive for 14 days since being marked as stale.'
-stale-issue-message: "This issue is stale because it has been open for 30 days with no activity."
+stale-issue-message: 'This issue is stale because it has been open for 30 days with no activity.'
-close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale."
+close-issue-message: 'This issue was closed because it has been inactive for 14 days since being marked as stale.'
 repo-token: ${{ secrets.GITHUB_TOKEN }}

1
.gitignore vendored

@ -5,3 +5,4 @@ node_modules
 data
 *.DS_Store
 docker-compose.override.yml
+dist

3
.prettierignore Normal file

@ -0,0 +1,3 @@
static
/node_modules
config

8
.prettierrc.json Normal file

@ -0,0 +1,8 @@
{
"tabWidth": 2,
"semi": false,
"trailingComma": "none",
"printWidth": 80,
"arrowParens": "avoid",
"singleQuote": true
}


@ -1,20 +1,14 @@
-FROM node:14.8.0-stretch
+FROM node:16-slim as base
-RUN mkdir -p /usr/src/app && \
-chown node:node /usr/src/app
-USER node:node
-WORKDIR /usr/src/app
-COPY --chown=node:node . .
-RUN npm install && \
-npm install redis@0.8.1 && \
-npm install pg@4.1.1 && \
-npm install memcached@2.2.2 && \
-npm install aws-sdk@2.738.0 && \
-npm install rethinkdbdash@2.3.31
+ARG user node
+RUN mkdir /app && chown -R $user:$user /app
+USER $user
+WORKDIR /app
+COPY --chown=$user:$user package.json yarn.lock /app/
+RUN yarn install
+COPY --chown=$user:$user . /app
 ENV STORAGE_TYPE=memcached \
 STORAGE_HOST=127.0.0.1 \
@ -58,11 +52,16 @@ EXPOSE ${PORT}
 STOPSIGNAL SIGINT
 ENTRYPOINT [ "bash", "docker-entrypoint.sh" ]
+RUN yarn remove:files
+RUN yarn build:typescript
+COPY static /app/dist/static
 HEALTHCHECK --interval=30s --timeout=30s --start-period=5s \
 --retries=3 CMD [ "sh", "-c", "echo -n 'curl localhost:7777... '; \
 (\
 curl -sf localhost:7777 > /dev/null\
 ) && echo OK || (\
 echo Fail && exit 2\
 )"]
-CMD ["npm", "start"]
+CMD ["yarn", "start"]

128
README.md

@ -1,15 +1,15 @@
 # Haste
 Haste is an open-source pastebin software written in node.js, which is easily
 installable in any network. It can be backed by either redis or filesystem,
 and has a very easy adapter interface for other stores. A publicly available
 version can be found at [hastebin.com](http://hastebin.com)
 Major design objectives:
-* Be really pretty
-* Be really simple
-* Be easy to set up and use
+- Be really pretty
+- Be really simple
+- Be easy to set up and use
 Haste works really well with a little utility called
 [haste-client](https://github.com/seejohnrun/haste-client), allowing you
@ -18,41 +18,58 @@ to do things like:
 `cat something | haste`
 which will output a URL to share containing the contents of `cat something`'s
 STDOUT. Check the README there for more details and usages.
 ## Tested Browsers
-* Firefox 8
-* Chrome 17
-* Safari 5.3
+- Firefox 8
+- Chrome 17
+- Safari 5.3
 ## Installation
 1. Download the package, and expand it
-2. Explore the settings inside of config.js, but the defaults should be good
-3. `npm install`
-4. `npm start` (you may specify an optional `<config-path>` as well)
+2. `yarn`
+## Running the project
+> Explore the settings inside of project-config.js, but the defaults should be good
+### Development
+1. `yarn`
+2. `yarn dev` (you may specify an optional `<config-path>` as well)
+### Production
+1. `yarn`
+2. `yarn build` to build the package
+3. `yarn start` to start the server
+### Production with Docker
+1. `docker-compose up`
 ## Settings
-* `host` - the host the server runs on (default localhost)
-* `port` - the port the server runs on (default 7777)
-* `keyLength` - the length of the keys to user (default 10)
-* `maxLength` - maximum length of a paste (default 400000)
-* `staticMaxAge` - max age for static assets (86400)
-* `recompressStaticAssets` - whether or not to compile static js assets (true)
-* `documents` - static documents to serve (ex: http://hastebin.com/about.com)
+- `host` - the host the server runs on (default localhost)
+- `port` - the port the server runs on (default 7777)
+- `keyLength` - the length of the keys to user (default 10)
+- `maxLength` - maximum length of a paste (default 400000)
+- `staticMaxAge` - max age for static assets (86400)
+- `recompressStaticAssets` - whether or not to compile static js assets (true)
+- `documents` - static documents to serve (ex: http://hastebin.com/about.com)
 in addition to static assets. These will never expire.
-* `storage` - storage options (see below)
-* `logging` - logging preferences
-* `keyGenerator` - key generator options (see below)
-* `rateLimits` - settings for rate limiting (see below)
+- `storage` - storage options (see below)
+- `logging` - logging preferences
+- `keyGenerator` - key generator options (see below)
+- `rateLimits` - settings for rate limiting (see below)
 ## Rate Limiting
 When present, the `rateLimits` option enables built-in rate limiting courtesy
 of `connect-ratelimit`. Any of the options supported by that library can be
-used and set in `config.js`.
+used and set in `project-config.js`.
 See the README for [connect-ratelimit](https://github.com/dharmafly/connect-ratelimit)
 for more information!
@ -63,7 +80,7 @@ for more information!
 Attempts to generate phonetic keys, similar to `pwgen`
-``` json
+```json
 {
 "type": "phonetic"
 }
@ -73,7 +90,7 @@ Attempts to generate phonetic keys, similar to `pwgen`
 Generates a random key
-``` json
+```json
 {
 "type": "random",
 "keyspace": "abcdef"
@ -87,10 +104,10 @@ for the key.
 ### File
-To use file storage (the default) change the storage section in `config.js` to
+To use file storage (the default) change the storage section in `project-config.js` to
 something like:
-``` json
+```json
 {
 "path": "./data",
 "type": "file"
@ -106,11 +123,11 @@ File storage currently does not support paste expiration, you can follow [#191](
 To use redis storage you must install the `redis` package in npm, and have
 `redis-server` running on the machine.
-`npm install redis`
+`yarn add redis`
 Once you've done that, your config section should look like:
-``` json
+```json
 {
 "type": "redis",
 "host": "localhost",
@ -131,11 +148,11 @@ If your Redis server is configured for password authentification, use the `passw
 To use postgres storage you must install the `pg` package in npm
-`npm install pg`
+`yarn add pg`
 Once you've done that, your config section should look like:
-``` json
+```json
 {
 "type": "postgres",
 "connectionUrl": "postgres://user:password@host:5432/database"
@ -158,11 +175,11 @@ All of which are optional except `type` with very logical default values.
 To use mongodb storage you must install the 'mongodb' package in npm
-`npm install mongodb`
+`yarn add mongodb`
 Once you've done that, your config section should look like:
-``` json
+```json
 {
 "type": "mongo",
 "connectionUrl": "mongodb://localhost:27017/database"
@ -180,11 +197,11 @@ This is off by default, but will constantly kick back expirations on each view o
 To use memcache storage you must install the `memcached` package via npm
-`npm install memcached`
+`yarn add memcached`
 Once you've done that, your config section should look like:
-``` json
+```json
 {
 "type": "memcached",
 "host": "127.0.0.1",
@ -202,11 +219,11 @@ All of which are optional except `type` with very logical default values.
 To use the RethinkDB storage system, you must install the `rethinkdbdash` package via npm
-`npm install rethinkdbdash`
+`yarn add rethinkdbdash`
 Once you've done that, your config section should look like this:
-``` json
+```json
 {
 "type": "rethinkdb",
 "host": "127.0.0.1",
@ -224,11 +241,11 @@ You can optionally add the `user` and `password` properties to use a user system
 To use the Google Datastore storage system, you must install the `@google-cloud/datastore` package via npm
-`npm install @google-cloud/datastore`
+`yarn add @google-cloud/datastore`
 Once you've done that, your config section should look like this:
-``` json
+```json
 {
 "type": "google-datastore"
 }
@ -241,7 +258,7 @@ Authentication is handled automatically by [Google Cloud service account credent
 To use [Amazon S3](https://aws.amazon.com/s3/) as a storage system, you must
 install the `aws-sdk` package via npm:
-`npm install aws-sdk`
+`yarn add aws-sdk`
 Once you've done that, your config section should look like this:
@ -260,17 +277,14 @@ your bucket:
 ```json
 {
 "Version": "2012-10-17",
 "Statement": [
 {
-"Action": [
-"s3:GetObject",
-"s3:PutObject"
-],
-"Effect": "Allow",
-"Resource": "arn:aws:s3:::your-bucket-name-goes-here/*"
-}
-]
+"Action": ["s3:GetObject", "s3:PutObject"],
+"Effect": "Allow",
+"Resource": "arn:aws:s3:::your-bucket-name-goes-here/*"
+}
+]
 }
 ```
@ -384,6 +398,6 @@ SOFTWARE
 ### Other components:
-* jQuery: MIT/GPL license
-* highlight.js: Copyright © 2006, Ivan Sagalaev
-* highlightjs-coffeescript: WTFPL - Copyright © 2011, Dmytrii Nagirniak
+- jQuery: MIT/GPL license
+- highlight.js: Copyright © 2006, Ivan Sagalaev
+- highlightjs-coffeescript: WTFPL - Copyright © 2011, Dmytrii Nagirniak


@ -8,7 +8,7 @@ Haste is the prettiest, easiest to use pastebin ever made.
 ## Basic Usage
 Type what you want me to see, click "Save", and then copy the URL. Send that
 URL to someone and they'll see what you see.
 To make a new entry, click "New" (or type 'control + n')
@ -16,7 +16,7 @@ To make a new entry, click "New" (or type 'control + n')
 ## From the Console
 Most of the time I want to show you some text, it's coming from my current
 console session. We should make it really easy to take code from the console
 and send it to people.
 `cat something | haste` # https://hastebin.com/1238193
@ -24,27 +24,28 @@ and send it to people.
 You can even take this a step further, and cut out the last step of copying the
 URL with:
-* osx: `cat something | haste | pbcopy`
-* linux: `cat something | haste | xsel`
-* windows: check out [WinHaste](https://github.com/ajryan/WinHaste)
+- osx: `cat something | haste | pbcopy`
+- linux: `cat something | haste | xsel`
+- windows: check out [WinHaste](https://github.com/ajryan/WinHaste)
 After running that, the STDOUT output of `cat something` will show up at a URL
 which has been conveniently copied to your clipboard.
 That's all there is to that, and you can install it with `gem install haste`
 right now.
-* osx: you will need to have an up to date version of Xcode
-* linux: you will need to have rubygems and ruby-devel installed
+- osx: you will need to have an up to date version of Xcode
+- linux: you will need to have rubygems and ruby-devel installed
 ## Duration
 Pastes will stay for 30 days from their last view. They may be removed earlier
 and without notice.
 ## Privacy
 While the contents of hastebin.com are not directly crawled by any search robot
 that obeys "robots.txt", there should be no great expectation of privacy. Post
 things at your own risk. Not responsible for any loss of data or removed
 pastes.
@ -52,8 +53,8 @@ pastes.
 Haste can easily be installed behind your network, and it's all open source!
-* [haste-client](https://github.com/seejohnrun/haste-client)
-* [haste-server](https://github.com/seejohnrun/haste-server)
+- [haste-client](https://github.com/seejohnrun/haste-client)
+- [haste-server](https://github.com/seejohnrun/haste-server)
 ## Author

12
config/jest.config.js Normal file

@ -0,0 +1,12 @@
/** @type {import('ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
rootDir: '../',
testRegex: '\\.test\\.ts$',
reporters: ['default'],
roots: ['test'],
moduleNameMapper: {
'src/(.*)': '<rootDir>/src/$1'
}
}
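
For context (not part of the diff): with `rootDir` pointing back at the repository root, `roots: ['test']`, and the `src/(.*)` mapping above, a spec only has to match `*.test.ts` under `test/` and can import project code through its `src/` alias. A minimal sketch, assuming a hypothetical `test/constants.test.ts` exercising the `src/constants` module added later in this commit:

```ts
// test/constants.test.ts — hypothetical file; its name matches the testRegex above
import constants from 'src/constants' // resolved through the moduleNameMapper alias

describe('constants', () => {
  it('exposes the default key length used by the document handler', () => {
    expect(constants.DEFAULT_KEY_LENGTH).toBe(10)
  })
})
```

Such a spec would run via the `test:unit` script added to package.json in this commit (`yarn test:unit`).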


@ -28,8 +28,8 @@ const {
 RATE_LIMITS_BLACKLIST_TOTAL_REQUESTS,
 RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS,
 RATE_LIMITS_BLACKLIST,
-DOCUMENTS,
+DOCUMENTS
-} = process.env;
+} = process.env
 const config = {
 host: HOST,
@ -47,29 +47,29 @@ const config = {
 {
 level: LOGGING_LEVEL,
 type: LOGGING_TYPE,
-colorize: LOGGING_COLORIZE,
+colorize: LOGGING_COLORIZE
-},
+}
 ],
 keyGenerator: {
 type: KEYGENERATOR_TYPE,
-keyspace: KEY_GENERATOR_KEYSPACE,
+keyspace: KEY_GENERATOR_KEYSPACE
 },
 rateLimits: {
-whitelist: RATE_LIMITS_WHITELIST ? RATE_LIMITS_WHITELIST.split(",") : [],
+whitelist: RATE_LIMITS_WHITELIST ? RATE_LIMITS_WHITELIST.split(',') : [],
-blacklist: RATE_LIMITS_BLACKLIST ? RATE_LIMITS_BLACKLIST.split(",") : [],
+blacklist: RATE_LIMITS_BLACKLIST ? RATE_LIMITS_BLACKLIST.split(',') : [],
 categories: {
 normal: {
 totalRequests: RATE_LIMITS_NORMAL_TOTAL_REQUESTS,
-every: RATE_LIMITS_NORMAL_EVERY_MILLISECONDS,
+every: RATE_LIMITS_NORMAL_EVERY_MILLISECONDS
 },
 whitelist:
 RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS ||
 RATE_LIMITS_WHITELIST_TOTAL_REQUESTS
 ? {
 totalRequests: RATE_LIMITS_WHITELIST_TOTAL_REQUESTS,
-every: RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS,
+every: RATE_LIMITS_WHITELIST_EVERY_MILLISECONDS
 }
 : null,
 blacklist:
@ -77,10 +77,10 @@ const config = {
 RATE_LIMITS_BLACKLIST_TOTAL_REQUESTS
 ? {
 totalRequests: RATE_LIMITS_WHITELIST_TOTAL_REQUESTS,
-every: RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS,
+every: RATE_LIMITS_BLACKLIST_EVERY_MILLISECONDS
 }
-: null,
+: null
-},
+}
 },
 storage: {
@ -94,15 +94,15 @@ const config = {
 db: STORAGE_DB,
 user: STORAGE_USERNAME,
 password: STORAGE_PASSWORD,
-path: STORAGE_FILEPATH,
+path: STORAGE_FILEPATH
 },
 documents: DOCUMENTS
-? DOCUMENTS.split(",").reduce((acc, item) => {
+? DOCUMENTS.split(',').reduce((acc, item) => {
-const keyAndValueArray = item.replace(/\s/g, "").split("=");
+const keyAndValueArray = item.replace(/\s/g, '').split('=')
-return { ...acc, [keyAndValueArray[0]]: keyAndValueArray[1] };
+return { ...acc, [keyAndValueArray[0]]: keyAndValueArray[1] }
 }, {})
-: null,
+: null
-};
+}
-console.log(JSON.stringify(config));
+console.log(JSON.stringify(config))


@ -4,6 +4,6 @@
 set -e
-node ./docker-entrypoint.js > ./config.js
+node ./docker-entrypoint.js > ./config/project-config.js
 exec "$@"


@ -1,155 +0,0 @@
var winston = require('winston');
var Busboy = require('busboy');
// For handling serving stored documents
var DocumentHandler = function(options) {
if (!options) {
options = {};
}
this.keyLength = options.keyLength || DocumentHandler.defaultKeyLength;
this.maxLength = options.maxLength; // none by default
this.store = options.store;
this.keyGenerator = options.keyGenerator;
};
DocumentHandler.defaultKeyLength = 10;
// Handle retrieving a document
DocumentHandler.prototype.handleGet = function(request, response, config) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) {
if (ret) {
winston.verbose('retrieved document', { key: key });
response.writeHead(200, { 'content-type': 'application/json' });
if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ data: ret, key: key }));
}
}
else {
winston.warn('document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' });
if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
}
}, skipExpire);
};
// Handle retrieving the raw version of a document
DocumentHandler.prototype.handleRawGet = function(request, response, config) {
const key = request.params.id.split('.')[0];
const skipExpire = !!config.documents[key];
this.store.get(key, function(ret) {
if (ret) {
winston.verbose('retrieved raw document', { key: key });
response.writeHead(200, { 'content-type': 'text/plain; charset=UTF-8' });
if (request.method === 'HEAD') {
response.end();
} else {
response.end(ret);
}
}
else {
winston.warn('raw document not found', { key: key });
response.writeHead(404, { 'content-type': 'application/json' });
if (request.method === 'HEAD') {
response.end();
} else {
response.end(JSON.stringify({ message: 'Document not found.' }));
}
}
}, skipExpire);
};
// Handle adding a new Document
DocumentHandler.prototype.handlePost = function (request, response) {
var _this = this;
var buffer = '';
var cancelled = false;
// What to do when done
var onSuccess = function () {
// Check length
if (_this.maxLength && buffer.length > _this.maxLength) {
cancelled = true;
winston.warn('document >maxLength', { maxLength: _this.maxLength });
response.writeHead(400, { 'content-type': 'application/json' });
response.end(
JSON.stringify({ message: 'Document exceeds maximum length.' })
);
return;
}
// And then save if we should
_this.chooseKey(function (key) {
_this.store.set(key, buffer, function (res) {
if (res) {
winston.verbose('added document', { key: key });
response.writeHead(200, { 'content-type': 'application/json' });
response.end(JSON.stringify({ key: key }));
}
else {
winston.verbose('error adding document');
response.writeHead(500, { 'content-type': 'application/json' });
response.end(JSON.stringify({ message: 'Error adding document.' }));
}
});
});
};
// If we should, parse a form to grab the data
var ct = request.headers['content-type'];
if (ct && ct.split(';')[0] === 'multipart/form-data') {
var busboy = new Busboy({ headers: request.headers });
busboy.on('field', function (fieldname, val) {
if (fieldname === 'data') {
buffer = val;
}
});
busboy.on('finish', function () {
onSuccess();
});
request.pipe(busboy);
// Otherwise, use our own and just grab flat data from POST body
} else {
request.on('data', function (data) {
buffer += data.toString();
});
request.on('end', function () {
if (cancelled) { return; }
onSuccess();
});
request.on('error', function (error) {
winston.error('connection error: ' + error.message);
response.writeHead(500, { 'content-type': 'application/json' });
response.end(JSON.stringify({ message: 'Connection error.' }));
cancelled = true;
});
}
};
// Keep choosing keys until one isn't taken
DocumentHandler.prototype.chooseKey = function(callback) {
var key = this.acceptableKey();
var _this = this;
this.store.get(key, function(ret) {
if (ret) {
_this.chooseKey(callback);
} else {
callback(key);
}
}, true); // Don't bump expirations when key searching
};
DocumentHandler.prototype.acceptableKey = function() {
return this.keyGenerator.createKey(this.keyLength);
};
module.exports = DocumentHandler;


@ -1,56 +0,0 @@
/*global require,module,process*/
var AWS = require('aws-sdk');
var winston = require('winston');
var AmazonS3DocumentStore = function(options) {
this.expire = options.expire;
this.bucket = options.bucket;
this.client = new AWS.S3({region: options.region});
};
AmazonS3DocumentStore.prototype.get = function(key, callback, skipExpire) {
var _this = this;
var req = {
Bucket: _this.bucket,
Key: key
};
_this.client.getObject(req, function(err, data) {
if(err) {
callback(false);
}
else {
callback(data.Body.toString('utf-8'));
if (_this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys');
}
}
});
}
AmazonS3DocumentStore.prototype.set = function(key, data, callback, skipExpire) {
var _this = this;
var req = {
Bucket: _this.bucket,
Key: key,
Body: data,
ContentType: 'text/plain'
};
_this.client.putObject(req, function(err, data) {
if (err) {
callback(false);
}
else {
callback(true);
if (_this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys');
}
}
});
}
module.exports = AmazonS3DocumentStore;


@ -1,63 +0,0 @@
var fs = require('fs');
var crypto = require('crypto');
var winston = require('winston');
// For storing in files
// options[type] = file
// options[path] - Where to store
var FileDocumentStore = function(options) {
this.basePath = options.path || './data';
this.expire = options.expire;
};
// Generate md5 of a string
FileDocumentStore.md5 = function(str) {
var md5sum = crypto.createHash('md5');
md5sum.update(str);
return md5sum.digest('hex');
};
// Save data in a file, key as md5 - since we don't know what we could
// be passed here
FileDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
try {
var _this = this;
fs.mkdir(this.basePath, '700', function() {
var fn = _this.basePath + '/' + FileDocumentStore.md5(key);
fs.writeFile(fn, data, 'utf8', function(err) {
if (err) {
callback(false);
}
else {
callback(true);
if (_this.expire && !skipExpire) {
winston.warn('file store cannot set expirations on keys');
}
}
});
});
} catch(err) {
callback(false);
}
};
// Get data from a file from key
FileDocumentStore.prototype.get = function(key, callback, skipExpire) {
var _this = this;
var fn = this.basePath + '/' + FileDocumentStore.md5(key);
fs.readFile(fn, 'utf8', function(err, data) {
if (err) {
callback(false);
}
else {
callback(data);
if (_this.expire && !skipExpire) {
winston.warn('file store cannot set expirations on keys');
}
}
});
};
module.exports = FileDocumentStore;


@ -1,89 +0,0 @@
/*global require,module,process*/
const Datastore = require('@google-cloud/datastore');
const winston = require('winston');
class GoogleDatastoreDocumentStore {
// Create a new store with options
constructor(options) {
this.kind = "Haste";
this.expire = options.expire;
this.datastore = new Datastore();
}
// Save file in a key
set(key, data, callback, skipExpire) {
var expireTime = (skipExpire || this.expire === undefined) ? null : new Date(Date.now() + this.expire * 1000);
var taskKey = this.datastore.key([this.kind, key])
var task = {
key: taskKey,
data: [
{
name: 'value',
value: data,
excludeFromIndexes: true
},
{
name: 'expiration',
value: expireTime
}
]
};
this.datastore.insert(task).then(() => {
callback(true);
})
.catch(err => {
callback(false);
});
}
// Get a file from a key
get(key, callback, skipExpire) {
var taskKey = this.datastore.key([this.kind, key])
this.datastore.get(taskKey).then((entity) => {
if (skipExpire || entity[0]["expiration"] == null) {
callback(entity[0]["value"]);
}
else {
// check for expiry
if (entity[0]["expiration"] < new Date()) {
winston.info("document expired", {key: key, expiration: entity[0]["expiration"], check: new Date(null)});
callback(false);
}
else {
// update expiry
var task = {
key: taskKey,
data: [
{
name: 'value',
value: entity[0]["value"],
excludeFromIndexes: true
},
{
name: 'expiration',
value: new Date(Date.now() + this.expire * 1000)
}
]
};
this.datastore.update(task).then(() => {
})
.catch(err => {
winston.error("failed to update expiration", {error: err});
});
callback(entity[0]["value"]);
}
}
})
.catch(err => {
winston.error("Error retrieving value from Google Datastore", {error: err});
callback(false);
});
}
}
module.exports = GoogleDatastoreDocumentStore;


@ -1,54 +0,0 @@
const memcached = require('memcached');
const winston = require('winston');
class MemcachedDocumentStore {
// Create a new store with options
constructor(options) {
this.expire = options.expire;
const host = options.host || '127.0.0.1';
const port = options.port || 11211;
const url = `${host}:${port}`;
this.connect(url);
}
// Create a connection
connect(url) {
this.client = new memcached(url);
winston.info(`connecting to memcached on ${url}`);
this.client.on('failure', function(error) {
winston.info('error connecting to memcached', {error});
});
}
// Save file in a key
set(key, data, callback, skipExpire) {
this.client.set(key, data, skipExpire ? 0 : this.expire || 0, (error) => {
callback(!error);
});
}
// Get a file from a key
get(key, callback, skipExpire) {
this.client.get(key, (error, data) => {
const value = error ? false : data;
callback(value);
// Update the key so that the expiration is pushed forward
if (value && !skipExpire) {
this.set(key, data, (updateSucceeded) => {
if (!updateSucceeded) {
winston.error('failed to update expiration on GET', {key});
}
}, skipExpire);
}
});
}
}
module.exports = MemcachedDocumentStore;


@ -1,88 +0,0 @@
var MongoClient = require('mongodb').MongoClient,
winston = require('winston');
var MongoDocumentStore = function (options) {
this.expire = options.expire;
this.connectionUrl = process.env.DATABASE_URl || options.connectionUrl;
};
MongoDocumentStore.prototype.set = function (key, data, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000),
that = this;
this.safeConnect(function (err, db) {
if (err)
return callback(false);
db.collection('entries').update({
'entry_id': key,
$or: [
{ expiration: -1 },
{ expiration: { $gt: now } }
]
}, {
'entry_id': key,
'value': data,
'expiration': that.expire && !skipExpire ? that.expire + now : -1
}, {
upsert: true
}, function (err, existing) {
if (err) {
winston.error('error persisting value to mongodb', { error: err });
return callback(false);
}
callback(true);
});
});
};
MongoDocumentStore.prototype.get = function (key, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000),
that = this;
this.safeConnect(function (err, db) {
if (err)
return callback(false);
db.collection('entries').findOne({
'entry_id': key,
$or: [
{ expiration: -1 },
{ expiration: { $gt: now } }
]
}, function (err, entry) {
if (err) {
winston.error('error persisting value to mongodb', { error: err });
return callback(false);
}
callback(entry === null ? false : entry.value);
if (entry !== null && entry.expiration !== -1 && that.expire && !skipExpire) {
db.collection('entries').update({
'entry_id': key
}, {
$set: {
'expiration': that.expire + now
}
}, function (err, result) { });
}
});
});
};
MongoDocumentStore.prototype.safeConnect = function (callback) {
MongoClient.connect(this.connectionUrl, function (err, db) {
if (err) {
winston.error('error connecting to mongodb', { error: err });
callback(err);
} else {
callback(undefined, db);
}
});
};
module.exports = MongoDocumentStore;


@ -1,80 +0,0 @@
/*global require,module,process*/
var winston = require('winston');
const {Pool} = require('pg');
// create table entries (id serial primary key, key varchar(255) not null, value text not null, expiration int, unique(key));
// A postgres document store
var PostgresDocumentStore = function (options) {
this.expireJS = parseInt(options.expire, 10);
const connectionString = process.env.DATABASE_URL || options.connectionUrl;
this.pool = new Pool({connectionString});
};
PostgresDocumentStore.prototype = {
// Set a given key
set: function (key, data, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000);
var that = this;
this.safeConnect(function (err, client, done) {
if (err) { return callback(false); }
client.query('INSERT INTO entries (key, value, expiration) VALUES ($1, $2, $3)', [
key,
data,
that.expireJS && !skipExpire ? that.expireJS + now : null
], function (err) {
if (err) {
winston.error('error persisting value to postgres', { error: err });
return callback(false);
}
callback(true);
done();
});
});
},
// Get a given key's data
get: function (key, callback, skipExpire) {
var now = Math.floor(new Date().getTime() / 1000);
var that = this;
this.safeConnect(function (err, client, done) {
if (err) { return callback(false); }
client.query('SELECT id,value,expiration from entries where KEY = $1 and (expiration IS NULL or expiration > $2)', [key, now], function (err, result) {
if (err) {
winston.error('error retrieving value from postgres', { error: err });
return callback(false);
}
callback(result.rows.length ? result.rows[0].value : false);
if (result.rows.length && that.expireJS && !skipExpire) {
client.query('UPDATE entries SET expiration = $1 WHERE ID = $2', [
that.expireJS + now,
result.rows[0].id
], function (err) {
if (!err) {
done();
}
});
} else {
done();
}
});
});
},
// A connection wrapper
safeConnect: function (callback) {
this.pool.connect((error, client, done) => {
if (error) {
winston.error('error connecting to postgres', {error});
callback(error);
} else {
callback(undefined, client, done);
}
});
}
};
module.exports = PostgresDocumentStore;


@ -1,89 +0,0 @@
var redis = require('redis');
var winston = require('winston');
// For storing in redis
// options[type] = redis
// options[host] - The host to connect to (default localhost)
// options[port] - The port to connect to (default 5379)
// options[db] - The db to use (default 0)
// options[expire] - The time to live for each key set (default never)
var RedisDocumentStore = function(options, client) {
this.expire = options.expire;
if (client) {
winston.info('using predefined redis client');
RedisDocumentStore.client = client;
} else if (!RedisDocumentStore.client) {
winston.info('configuring redis');
RedisDocumentStore.connect(options);
}
};
// Create a connection according to config
RedisDocumentStore.connect = function(options) {
var host = options.host || '127.0.0.1';
var port = options.port || 6379;
var index = options.db || 0;
RedisDocumentStore.client = redis.createClient(port, host);
// authenticate if password is provided
if (options.password) {
RedisDocumentStore.client.auth(options.password);
}
RedisDocumentStore.client.on('error', function(err) {
winston.error('redis disconnected', err);
});
RedisDocumentStore.client.select(index, function(err) {
if (err) {
winston.error(
'error connecting to redis index ' + index,
{ error: err }
);
process.exit(1);
}
else {
winston.info('connected to redis on ' + host + ':' + port + '/' + index);
}
});
};
// Save file in a key
RedisDocumentStore.prototype.set = function(key, data, callback, skipExpire) {
var _this = this;
RedisDocumentStore.client.set(key, data, function(err) {
if (err) {
callback(false);
}
else {
if (!skipExpire) {
_this.setExpiration(key);
}
callback(true);
}
});
};
// Expire a key in expire time if set
RedisDocumentStore.prototype.setExpiration = function(key) {
if (this.expire) {
RedisDocumentStore.client.expire(key, this.expire, function(err) {
if (err) {
winston.error('failed to set expiry on key: ' + key);
}
});
}
};
// Get a file from a key
RedisDocumentStore.prototype.get = function(key, callback, skipExpire) {
var _this = this;
RedisDocumentStore.client.get(key, function(err, reply) {
if (!err && !skipExpire) {
_this.setExpiration(key);
}
callback(err ? false : reply);
});
};
module.exports = RedisDocumentStore;


@ -1,46 +0,0 @@
const crypto = require('crypto');
const rethink = require('rethinkdbdash');
const winston = require('winston');
const md5 = (str) => {
const md5sum = crypto.createHash('md5');
md5sum.update(str);
return md5sum.digest('hex');
};
class RethinkDBStore {
constructor(options) {
this.client = rethink({
silent: true,
host: options.host || '127.0.0.1',
port: options.port || 28015,
db: options.db || 'haste',
user: options.user || 'admin',
password: options.password || ''
});
}
set(key, data, callback) {
this.client.table('uploads').insert({ id: md5(key), data: data }).run((error) => {
if (error) {
callback(false);
winston.error('failed to insert to table', error);
return;
}
callback(true);
});
}
get(key, callback) {
this.client.table('uploads').get(md5(key)).run((error, result) => {
if (error || !result) {
callback(false);
if (error) winston.error('failed to insert to table', error);
return;
}
callback(result.data);
});
}
}
module.exports = RethinkDBStore;


@ -1,32 +0,0 @@
const fs = require('fs');
module.exports = class DictionaryGenerator {
constructor(options, readyCallback) {
// Check options format
if (!options) throw Error('No options passed to generator');
if (!options.path) throw Error('No dictionary path specified in options');
// Load dictionary
fs.readFile(options.path, 'utf8', (err, data) => {
if (err) throw err;
this.dictionary = data.split(/[\n\r]+/);
if (readyCallback) readyCallback();
});
}
// Generates a dictionary-based key, of keyLength words
createKey(keyLength) {
let text = '';
for (let i = 0; i < keyLength; i++) {
const index = Math.floor(Math.random() * this.dictionary.length);
text += this.dictionary[index];
}
return text;
}
};


@ -1,27 +0,0 @@
// Draws inspiration from pwgen and http://tools.arantius.com/password
const randOf = (collection) => {
return () => {
return collection[Math.floor(Math.random() * collection.length)];
};
};
// Helper methods to get an random vowel or consonant
const randVowel = randOf('aeiou');
const randConsonant = randOf('bcdfghjklmnpqrstvwxyz');
module.exports = class PhoneticKeyGenerator {
// Generate a phonetic key of alternating consonant & vowel
createKey(keyLength) {
let text = '';
const start = Math.round(Math.random());
for (let i = 0; i < keyLength; i++) {
text += (i % 2 == start) ? randConsonant() : randVowel();
}
return text;
}
};


@ -1,20 +0,0 @@
module.exports = class RandomKeyGenerator {
// Initialize a new generator with the given keySpace
constructor(options = {}) {
this.keyspace = options.keyspace || 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
}
// Generate a key of the given length
createKey(keyLength) {
var text = '';
for (var i = 0; i < keyLength; i++) {
const index = Math.floor(Math.random() * this.keyspace.length);
text += this.keyspace.charAt(index);
}
return text;
}
};

1652
package-lock.json generated

File diff suppressed because it is too large


@ -12,36 +12,85 @@
"email": "john.crepezzi@gmail.com", "email": "john.crepezzi@gmail.com",
"url": "http://seejohncode.com/" "url": "http://seejohncode.com/"
}, },
"main": "haste",
"dependencies": { "dependencies": {
"busboy": "0.2.4", "busboy": "0.2.4",
"connect": "^3.7.0", "connect": "^3.7.0",
"connect-ratelimit": "0.0.7", "connect-ratelimit": "^0.0.7",
"connect-route": "0.1.5", "connect-route": "0.1.5",
"pg": "^8.0.0", "dotenv": "^16.0.1",
"redis": "0.8.1", "express": "^4.18.1",
"redis-url": "0.1.0", "st": "^3.0.0",
"st": "^2.0.0",
"uglify-js": "3.1.6", "uglify-js": "3.1.6",
"winston": "^2.0.0" "winston": "^2.0.0"
}, },
"devDependencies": { "devDependencies": {
"mocha": "^8.1.3" "@types/aws-sdk": "^2.7.0",
"@types/busboy": "^1.5.0",
"@types/express": "^4.17.13",
"@types/google-cloud__datastore": "^1.3.6",
"@types/jest": "^27.5.1",
"@types/memcached": "^2.2.7",
"@types/mongodb": "^4.0.7",
"@types/node": "^17.0.35",
"@types/pg": "^8.6.5",
"@types/redis": "^4.0.11",
"@types/uglify-js": "^3.13.2",
"@typescript-eslint/eslint-plugin": "^5.26.0",
"@typescript-eslint/parser": "^5.26.0",
"concurrently": "^7.2.1",
"copyfiles": "^2.4.1",
"eslint": "^8.10.0",
"eslint-config-airbnb": "^19.0.4",
"eslint-config-airbnb-typescript": "^17.0.0",
"eslint-config-prettier": "^8.5.0",
"eslint-import-resolver-typescript": "^2.7.1",
"eslint-plugin-import": "^2.26.0",
"eslint-plugin-jest": "^26.2.2",
"jest": "^28.1.0",
"mocha": "^8.1.3",
"module-resolver": "^1.0.0",
"nodemon": "^2.0.16",
"prettier": "^2.5.1",
"rimraf": "^3.0.2",
"ts-auto-mock": "^3.6.2",
"ts-jest": "^28.0.3",
"ts-node": "^9.1.1",
"tsconfig-paths": "^4.0.0",
"tscpaths": "^0.0.9",
"typescript": "^4.6.4"
}, },
"bundledDependencies": [], "bundledDependencies": [],
"main": "haste",
"bin": { "bin": {
"haste-server": "./server.js" "haste-server": "./dist/src/server.js"
}, },
"files": [ "files": [
"server.js", "src",
"lib",
"static" "static"
], ],
"directories": { "nodemonConfig": {
"lib": "./lib" "ignore": [
"test/**/*.test.ts",
".git",
"node_modules"
],
"watch": [
"src",
"config"
],
"exec": "node -r tsconfig-paths/register -r ts-node/register ./src/server.ts",
"ext": "ts, js"
}, },
"scripts": { "scripts": {
"start": "node server.js", "copy:files": "copyFiles -u 1 static/**/* dist/static",
"test": "mocha --recursive" "remove:files": "rimraf dist",
"test:unit": "jest --config config/jest.config.js",
"build:typescript": "tsc --project tsconfig.json",
"build": "yarn remove:files && yarn copy:files && yarn build:typescript",
"start": "TS_NODE_BASEURL=./dist node -r tsconfig-paths/register ./dist/src/server.js",
"dev": "nodemon",
"lint": "eslint src --fix",
"types:check": "tsc --noEmit --pretty",
"pretty": "prettier --write ."
} }
} }

164
server.js

@ -1,164 +0,0 @@
var http = require('http');
var fs = require('fs');
var uglify = require('uglify-js');
var winston = require('winston');
var connect = require('connect');
var route = require('connect-route');
var connect_st = require('st');
var connect_rate_limit = require('connect-ratelimit');
var DocumentHandler = require('./lib/document_handler');
// Load the configuration and set some defaults
const configPath = process.argv.length <= 2 ? 'config.js' : process.argv[2];
const config = JSON.parse(fs.readFileSync(configPath, 'utf8'));
config.port = process.env.PORT || config.port || 7777;
config.host = process.env.HOST || config.host || 'localhost';
// Set up the logger
if (config.logging) {
try {
winston.remove(winston.transports.Console);
} catch(e) {
/* was not present */
}
var detail, type;
for (var i = 0; i < config.logging.length; i++) {
detail = config.logging[i];
type = detail.type;
delete detail.type;
winston.add(winston.transports[type], detail);
}
}
// build the store from the config on-demand - so that we don't load it
// for statics
if (!config.storage) {
config.storage = { type: 'file' };
}
if (!config.storage.type) {
config.storage.type = 'file';
}
var Store, preferredStore;
if (process.env.REDISTOGO_URL && config.storage.type === 'redis') {
var redisClient = require('redis-url').connect(process.env.REDISTOGO_URL);
Store = require('./lib/document_stores/redis');
preferredStore = new Store(config.storage, redisClient);
}
else {
Store = require('./lib/document_stores/' + config.storage.type);
preferredStore = new Store(config.storage);
}
// Compress the static javascript assets
if (config.recompressStaticAssets) {
var list = fs.readdirSync('./static');
for (var j = 0; j < list.length; j++) {
var item = list[j];
if ((item.indexOf('.js') === item.length - 3) && (item.indexOf('.min.js') === -1)) {
var dest = item.substring(0, item.length - 3) + '.min' + item.substring(item.length - 3);
var orig_code = fs.readFileSync('./static/' + item, 'utf8');
fs.writeFileSync('./static/' + dest, uglify.minify(orig_code).code, 'utf8');
winston.info('compressed ' + item + ' into ' + dest);
}
}
}
// Send the static documents into the preferred store, skipping expirations
var path, data;
for (var name in config.documents) {
path = config.documents[name];
data = fs.readFileSync(path, 'utf8');
winston.info('loading static document', { name: name, path: path });
if (data) {
preferredStore.set(name, data, function(cb) {
winston.debug('loaded static document', { success: cb });
}, true);
}
else {
winston.warn('failed to load static document', { name: name, path: path });
}
}
// Pick up a key generator
var pwOptions = config.keyGenerator || {};
pwOptions.type = pwOptions.type || 'random';
var gen = require('./lib/key_generators/' + pwOptions.type);
var keyGenerator = new gen(pwOptions);
// Configure the document handler
var documentHandler = new DocumentHandler({
store: preferredStore,
maxLength: config.maxLength,
keyLength: config.keyLength,
keyGenerator: keyGenerator
});
var app = connect();
// Rate limit all requests
if (config.rateLimits) {
config.rateLimits.end = true;
app.use(connect_rate_limit(config.rateLimits));
}
// first look at API calls
app.use(route(function(router) {
// get raw documents - support getting with extension
router.get('/raw/:id', function(request, response) {
return documentHandler.handleRawGet(request, response, config);
});
router.head('/raw/:id', function(request, response) {
return documentHandler.handleRawGet(request, response, config);
});
// add documents
router.post('/documents', function(request, response) {
return documentHandler.handlePost(request, response);
});
// get documents
router.get('/documents/:id', function(request, response) {
return documentHandler.handleGet(request, response, config);
});
router.head('/documents/:id', function(request, response) {
return documentHandler.handleGet(request, response, config);
});
}));
// Otherwise, try to match static files
app.use(connect_st({
path: __dirname + '/static',
content: { maxAge: config.staticMaxAge },
passthrough: true,
index: false
}));
// Then we can loop back - and everything else should be a token,
// so route it back to /
app.use(route(function(router) {
router.get('/:id', function(request, response, next) {
request.sturl = '/';
next();
});
}));
// And match index
app.use(connect_st({
path: __dirname + '/static',
content: { maxAge: config.staticMaxAge },
index: 'index.html'
}));
http.createServer(app).listen(config.port, config.host);
winston.info('listening on ' + config.host + ':' + config.port);

5
src/constants/index.ts Normal file

@ -0,0 +1,5 @@
const DEFAULT_KEY_LENGTH = 10
export default {
DEFAULT_KEY_LENGTH
}

60
src/global.d.ts vendored Normal file

@ -0,0 +1,60 @@
declare module 'rethinkdbdash' {
type Result = {
data: string
}
type Callback = (error: unknown, result?: Result) => void
interface RethinkRun {
run(callback: Callback)
}
type RethinkInsertObject = {
id: string
data: string
}
interface RethinkFunctions {
insert(data: RethinkInsertObject): RethinkRun
get(id: string): RethinkRun
}
export interface RethinkClient {
table(tableName: string): RethinkFunctions
}
function rethink<T>(obj: T): RethinkClient<T>
export = rethink
}
declare module 'connect-ratelimit' {
function connectRateLimit(
as: RateLimits
): (
req: express.Request,
res: express.Response,
next: express.NextFunction
) => void
export = connectRateLimit
}
declare namespace Express {
export interface Request {
sturl: string
}
}
declare module 'st' {
type ConnectSt = {
path: string
content: { maxAge: number }
passthrough?: boolean
index: boolean | string
}
function connectSt(st: ConnectSt): express.NextFunction
export = connectSt
}
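
For context (not part of the diff): the `Express` namespace augmentation above exists so the catch-all route can keep doing what the removed server.js did with `request.sturl = '/'` without a type error. A minimal sketch of middleware relying on it (the function name and module are illustrative only):

```ts
import type { NextFunction, Request, Response } from 'express'

// Rewrite unmatched paths to '/' so the st static middleware serves index.html,
// mirroring the behaviour of the old connect-route handler in server.js.
const rewriteToIndex = (req: Request, _res: Response, next: NextFunction): void => {
  req.sturl = '/' // allowed by the `declare namespace Express` augmentation above
  next()
}

export default rewriteToIndex
```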


@ -0,0 +1,21 @@
import buildGenerator from 'src/lib/key-generators/builder'
import type { Config } from 'src/types/config'
import buildStore from 'src/lib/document-stores/builder'
import DocumentHandler from './index'
const build = async (config: Config) => {
const storage = await buildStore(config)
const keyGenerator = await buildGenerator(config)
const documentHandler = new DocumentHandler({
store: storage,
config,
maxLength: config.maxLength,
keyLength: config.keyLength,
keyGenerator
})
return documentHandler
}
export default build
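
For context (not part of the diff): the builder is async because both the store and the key generator are resolved via dynamic imports. A minimal sketch of how a server entry point might consume it; the Express wiring and the `startServer` function are illustrative assumptions, not code from this commit:

```ts
import express from 'express'
import buildDocumentHandler from 'src/lib/document-handler/builder'
import type { Config } from 'src/types/config'

// Illustrative only: wire the built handler onto the API routes once at startup.
const startServer = async (config: Config) => {
  const documentHandler = await buildDocumentHandler(config)
  const app = express()

  app.post('/documents', (req, res) => documentHandler.handlePost(req, res))
  app.get('/documents/:id', (req, res) => documentHandler.handleGet(req, res))
  app.get('/raw/:id', (req, res) => documentHandler.handleRawGet(req, res))

  // host/port are assumed to be present on Config, as in the README settings
  app.listen(config.port, config.host)
}

export default startServer
```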


@ -0,0 +1,176 @@
import { Request, Response } from 'express'
import * as winston from 'winston'
import Busboy from 'busboy'
import type { Config } from 'src/types/config'
import type { Document } from 'src/types/document'
import constants from 'src/constants'
import KeyGenerator from 'src/lib/key-generators'
import { Store } from '../document-stores'
class DocumentHandler {
keyLength: number
maxLength?: number
store: Store
keyGenerator: KeyGenerator
config: Config
constructor(options: Document) {
this.keyLength = options.keyLength || constants.DEFAULT_KEY_LENGTH
this.maxLength = options.maxLength // none by default
this.store = options.store
this.config = options.config
this.keyGenerator = options.keyGenerator
}
handleGet(request: Request, response: Response) {
const key = request.params.id.split('.')[0]
const skipExpire = !!this.config.documents[key]
this.store.get(
key,
ret => {
if (ret) {
winston.verbose('retrieved document', { key })
response.writeHead(200, { 'content-type': 'application/json' })
if (request.method === 'HEAD') {
response.end()
} else {
response.end(JSON.stringify({ data: ret, key }))
}
} else {
winston.warn('document not found', { key })
response.writeHead(404, { 'content-type': 'application/json' })
if (request.method === 'HEAD') {
response.end()
} else {
response.end(JSON.stringify({ message: 'Document not found.' }))
}
}
},
skipExpire
)
}
handlePost(request: Request, response: Response) {
// const this = this
let buffer = ''
let cancelled = false
// What to do when done
const onSuccess = () => {
// Check length
if (this.maxLength && buffer.length > this.maxLength) {
cancelled = true
winston.warn('document >maxLength', { maxLength: this.maxLength })
response.writeHead(400, { 'content-type': 'application/json' })
response.end(
JSON.stringify({ message: 'Document exceeds maximum length.' })
)
return
}
// And then save if we should
this.chooseKey(key => {
this.store.set(key, buffer, res => {
if (res) {
winston.verbose('added document', { key })
response.writeHead(200, { 'content-type': 'application/json' })
response.end(JSON.stringify({ key }))
} else {
winston.verbose('error adding document')
response.writeHead(500, { 'content-type': 'application/json' })
response.end(JSON.stringify({ message: 'Error adding document.' }))
}
})
})
}
// If we should, parse a form to grab the data
const ct = request.headers['content-type']
if (ct && ct.split(';')[0] === 'multipart/form-data') {
const busboy = Busboy({ headers: request.headers })
busboy.on('field', (fieldname, val) => {
if (fieldname === 'data') {
buffer = val
}
})
busboy.on('finish', () => {
onSuccess()
})
request.pipe(busboy)
// Otherwise, use our own and just grab flat data from POST body
} else {
request.on('data', data => {
buffer += data.toString()
})
request.on('end', () => {
if (cancelled) {
return
}
onSuccess()
})
request.on('error', error => {
winston.error(`connection error: ${error.message}`)
response.writeHead(500, { 'content-type': 'application/json' })
response.end(JSON.stringify({ message: 'Connection error.' }))
cancelled = true
})
}
}
handleRawGet(request: Request, response: Response) {
const key = request.params.id.split('.')[0]
const skipExpire = !!this.config.documents[key]
this.store.get(
key,
ret => {
if (ret) {
winston.verbose('retrieved raw document', { key })
response.writeHead(200, {
'content-type': 'text/plain; charset=UTF-8'
})
if (request.method === 'HEAD') {
response.end()
} else {
response.end(ret)
}
} else {
winston.warn('raw document not found', { key })
response.writeHead(404, { 'content-type': 'application/json' })
if (request.method === 'HEAD') {
response.end()
} else {
response.end(JSON.stringify({ message: 'Document not found.' }))
}
}
},
skipExpire
)
}
chooseKey = (callback: { (key: string): void }) => {
const key = this.acceptableKey()
if (!key) return
this.store.get(
key,
(ret: string | boolean) => {
if (ret) {
this.chooseKey(callback)
} else {
callback(key)
}
},
true
) // Don't bump expirations when key searching
}
acceptableKey = () => this.keyGenerator.createKey?.(this.keyLength)
}
export default DocumentHandler


@ -0,0 +1,76 @@
import * as winston from 'winston'
import AWS = require('aws-sdk')
import type { AmazonStoreConfig } from 'src/types/config'
import { Callback } from 'src/types/callback'
import { Store } from '.'
class AmazonS3DocumentStore extends Store {
bucket: string | undefined
client: AWS.S3
constructor(options: AmazonStoreConfig) {
super(options)
this.bucket = options.bucket
this.client = new AWS.S3({ region: options.region })
}
get = (
key: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
if (!this.bucket) {
callback(false)
return
}
const req = {
Bucket: this.bucket,
Key: key
}
this.client.getObject(req, (err, data) => {
if (err || !data.Body) {
callback(false)
} else {
callback(data.Body.toString('utf-8'))
if (this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys')
}
}
})
}
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
if (!this.bucket) {
callback(false)
return
}
const req = {
Bucket: this.bucket,
Key: key,
Body: data as AWS.S3.PutObjectOutput,
ContentType: 'text/plain'
}
this.client.putObject(req, err => {
if (err) {
callback(false)
} else {
callback(true)
if (this.expire && !skipExpire) {
winston.warn('amazon s3 store cannot set expirations on keys')
}
}
})
}
}
export default AmazonS3DocumentStore


@ -0,0 +1,12 @@
import type { Config } from 'src/types/config'
import { Store } from '.'
const build = async (config: Config): Promise<Store> => {
const DocumentStore = (
await import(`../document-stores/${config.storage.type}`)
).default
return new DocumentStore(config.storage)
}
export default build
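
For context (not part of the diff): `config.storage.type` doubles as the module name, so `{ type: 'file' }` resolves to `./file`, `{ type: 'redis' }` to `./redis`, and so on, mirroring the `require('./lib/document_stores/' + config.storage.type)` line removed from the old server.js. A minimal usage sketch (the partial config literal and the cast are for illustration only):

```ts
import buildStore from 'src/lib/document-stores/builder'
import type { Config } from 'src/types/config'

const run = async () => {
  // Partial config for illustration; a real Config carries the remaining fields.
  const config = { storage: { type: 'file', path: './data' } } as Config

  // Resolves to src/lib/document-stores/file because storage.type is 'file'
  const store = await buildStore(config)
  store.set('example-key', 'hello haste', ok => console.log('stored:', ok))
}
```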


@ -0,0 +1,75 @@
import * as winston from 'winston'
import * as fs from 'fs'
import * as crypto from 'crypto'
import type { Callback } from 'src/types/callback'
import type { FileStoreConfig } from 'src/types/config'
import { Store } from '.'
// Generate md5 of a string
const md5 = (str: string) => {
const md5sum = crypto.createHash('md5')
md5sum.update(str)
return md5sum.digest('hex')
}
// For storing in files
// options[type] = file
// options[path] - Where to store
class FileDocumentStore extends Store {
basePath: string
constructor(options: FileStoreConfig) {
super(options)
this.basePath = options.path || './data'
}
// Get data from a file from key
get = (
key: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
const fn = `${this.basePath}/${md5(key)}`
fs.readFile(fn, 'utf8', (err, data) => {
if (err) {
callback(false)
} else {
callback(data)
if (this.expire && !skipExpire) {
winston.warn('file store cannot set expirations on keys')
}
}
})
}
// Save data in a file, key as md5 - since we don't know what we could
// be passed here
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
try {
fs.mkdir(this.basePath, '700', () => {
const fn = `${this.basePath}/${md5(key)}`
fs.writeFile(fn, data, 'utf8', err => {
if (err) {
callback(false)
} else {
callback(true)
if (this.expire && !skipExpire) {
winston.warn('file store cannot set expirations on keys')
}
}
})
})
} catch (err) {
callback(false)
}
}
}
export default FileDocumentStore


@ -0,0 +1,110 @@
import Datastore = require('@google-cloud/datastore')
import * as winston from 'winston'
import type { Callback } from 'src/types/callback'
import type { GoogleStoreConfig } from 'src/types/config'
import { Store } from '.'
class GoogleDatastoreDocumentStore extends Store {
kind: string
datastore: Datastore
// Create a new store with options
constructor(options: GoogleStoreConfig) {
super(options)
this.kind = 'Haste'
this.datastore = new Datastore()
}
// Save file in a key
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean
) => {
const expireTime =
skipExpire || this.expire === undefined
? null
: new Date(Date.now() + this.expire * 1000)
const taskKey = this.datastore.key([this.kind, key])
const task = {
key: taskKey,
data: [
{
name: 'value',
value: data,
excludeFromIndexes: true
},
{
name: 'expiration',
value: expireTime
}
]
}
this.datastore
.insert(task)
.then(() => {
callback(true)
})
.catch(() => {
callback(false)
})
}
// Get a file from a key
get = (key: string, callback: Callback, skipExpire?: boolean): void => {
const taskKey = this.datastore.key([this.kind, key])
this.datastore
.get(taskKey)
.then(entity => {
if (!entity[0]) {
callback(false)
} else if (skipExpire || entity[0].expiration == null) {
callback(entity[0].value)
} else if (entity[0].expiration < new Date()) {
winston.info('document expired', {
key,
expiration: entity[0].expiration,
check: new Date()
})
callback(false)
} else {
// update expiry
const task = {
key: taskKey,
data: [
{
name: 'value',
value: entity[0]?.value,
excludeFromIndexes: true
},
{
name: 'expiration',
value: new Date(
Date.now() + (this.expire ? this.expire * 1000 : 0)
)
}
]
}
this.datastore
.update(task)
.then(() => {})
.catch(err => {
winston.error('failed to update expiration', { error: err })
})
callback(entity[0]?.value)
}
})
.catch(err => {
winston.error('Error retrieving value from Google Datastore', {
error: err
})
callback(false)
})
}
}
export default GoogleDatastoreDocumentStore

View File

@ -0,0 +1,25 @@
import { BaseStoreConfig } from 'src/types/config'
export type Callback = (data: boolean | string) => void
export abstract class Store {
type: string
expire?: number
constructor(config: BaseStoreConfig) {
this.type = config.type
if (config.expire) {
this.expire = config.expire
}
}
abstract get: (key: string, callback: Callback, skipExpire?: boolean) => void
abstract set: (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean
) => void
}

View File

@ -0,0 +1,66 @@
import * as winston from 'winston'
import Memcached = require('memcached')
import type { Callback } from 'src/types/callback'
import type { MemcachedStoreConfig } from 'src/types/config'
import { Store } from '.'
class MemcachedDocumentStore extends Store {
client: Memcached
// Create a new store with options
constructor(options: MemcachedStoreConfig) {
super(options)
const host = options.host || '127.0.0.1'
const port = options.port || 11211
const url = `${host}:${port}`
// Create a connection
this.client = new Memcached(url)
winston.info(`connecting to memcached on ${url}`)
this.client.on('failure', (error: Memcached.IssueData) => {
winston.info('error connecting to memcached', { error })
})
}
// Get a file from a key
get = (
key: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
this.client?.get(key, (error, data: string) => {
const value = error ? false : data
callback(value)
// Update the key so that the expiration is pushed forward
if (value && !skipExpire) {
this.set(
key,
data,
updateSucceeded => {
if (!updateSucceeded) {
winston.error('failed to update expiration on GET', { key })
}
},
skipExpire
)
}
})
}
// Save file in a key
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
this.client?.set(key, data, skipExpire ? 0 : this.expire || 0, error => {
callback(!error)
})
}
}
export default MemcachedDocumentStore

View File

@ -0,0 +1,126 @@
import * as winston from 'winston'
import mongodb = require('mongodb')
import type { Callback } from 'src/types/callback'
import type { MongoStoreConfig } from 'src/types/config'
import { Store } from '.'
const { MongoClient } = mongodb
type ConnectCallback = (error?: Error, db?: mongodb.MongoClient) => void
class MongoDocumentStore extends Store {
connectionUrl: string
constructor(options: MongoStoreConfig) {
super(options)
this.connectionUrl = process.env.DATABASE_URL || options.connectionUrl
}
safeConnect = (callback: ConnectCallback) => {
MongoClient.connect(this.connectionUrl, (err, client) => {
if (err) {
winston.error('error connecting to mongodb', { error: err })
callback(err)
} else {
callback(undefined, client)
}
})
}
get = (
key: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
const now = Math.floor(new Date().getTime() / 1000)
this.safeConnect((err, client) => {
if (err) return callback(false)
return client
?.db()
.collection('entries')
.findOne(
{
entry_id: key,
$or: [{ expiration: -1 }, { expiration: { $gt: now } }]
},
(error?: Error, entry?) => {
if (error) {
winston.error('error retrieving value from mongodb', { error })
return callback(false)
}
callback(entry === null ? false : entry?.value)
if (
entry !== null &&
entry?.expiration !== -1 &&
this.expire &&
!skipExpire
) {
return client
.db()
.collection('entries')
.update(
{
entry_id: key
},
{
$set: {
expiration: this.expire + now
}
},
{},
() => {}
)
}
return true
}
)
})
}
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
const now = Math.floor(new Date().getTime() / 1000)
this.safeConnect((err, client) => {
if (err) return callback(false)
return client
?.db()
.collection('entries')
.update(
{
entry_id: key,
$or: [{ expiration: -1 }, { expiration: { $gt: now } }]
},
{
entry_id: key,
value: data,
expiration: this.expire && !skipExpire ? this.expire + now : -1
},
{
upsert: true
},
(error?: Error) => {
if (error) {
winston.error('error persisting value to mongodb', { error })
return callback(false)
}
return callback(true)
}
)
})
}
}
export default MongoDocumentStore

View File

@ -0,0 +1,111 @@
import * as winston from 'winston'
import Pg = require('pg')
import type { Callback } from 'src/types/callback'
import type { PostgresStoreConfig } from 'src/types/config'
import { Store } from '.'
const { Pool } = Pg
type ConnectCallback = (
error?: Error,
client?: Pg.PoolClient,
done?: () => void
) => void
// A postgres document store
class PostgresDocumentStore extends Store {
pool: Pg.Pool
constructor(options: PostgresStoreConfig) {
super(options)
const connectionString = process.env.DATABASE_URL || options.connectionUrl
this.pool = new Pool({ connectionString })
}
// A connection wrapper
safeConnect = (callback: ConnectCallback) => {
this.pool.connect(
(error: Error, client: Pg.PoolClient, done: () => void) => {
if (error) {
winston.error('error connecting to postgres', { error })
callback(error)
} else {
callback(undefined, client, done)
}
}
)
}
// Get a given key's data
get = (
key: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
const now = Math.floor(new Date().getTime() / 1000)
this.safeConnect((err, client, done): void => {
if (err) {
return callback(false)
}
return client?.query(
'SELECT id,value,expiration from entries where KEY = $1 and (expiration IS NULL or expiration > $2)',
[key, now],
(error: Error, result) => {
if (error) {
winston.error('error retrieving value from postgres', {
error
})
return callback(false)
}
callback(result.rows.length ? result.rows[0].value : false)
if (result.rows.length && this.expire && !skipExpire) {
return client.query(
'UPDATE entries SET expiration = $1 WHERE ID = $2',
[this.expire + now, result.rows[0].id],
(currentErr: Error) => {
if (!currentErr) {
return done?.()
}
return callback(false)
}
)
}
return done?.()
}
)
})
}
// Set a given key
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
const now = Math.floor(new Date().getTime() / 1000)
this.safeConnect((err, client, done) => {
if (err) {
return callback(false)
}
return client?.query(
'INSERT INTO entries (key, value, expiration) VALUES ($1, $2, $3)',
[key, data, this.expire && !skipExpire ? this.expire + now : null],
(error: Error) => {
if (error) {
winston.error('error persisting value to postgres', { error })
return callback(false)
}
callback(true)
return done?.()
}
)
})
}
}
export default PostgresDocumentStore

View File

@ -0,0 +1,103 @@
import * as winston from 'winston'
import redis = require('redis')
import type { Callback } from 'src/types/callback'
import { RedisStoreConfig } from 'src/types/config'
import { Store } from '.'
const { createClient } = redis
export type RedisClientType = ReturnType<typeof redis.createClient>
// For storing in redis
// options[type] = redis
// options[url] - the url to connect to redis
// options[host] - The host to connect to (default localhost)
// options[port] - The port to connect to (default 6379)
// options[db] - The db to use (default 0)
// options[expire] - The time to live for each key set (default never)
class RedisDocumentStore extends Store {
client: RedisClientType
constructor(options: RedisStoreConfig) {
super(options)
const url = process.env.REDISTOGO_URL || options.url
const host = options.host || '127.0.0.1'
const port = options.port || '6379'
const index = options.db || 0
winston.info('configuring redis')
const connectionParameters = url
? {
url
}
: {
host,
port
}
const config = {
...connectionParameters,
database: index,
...(options.username ? { username: options.username } : {}),
...(options.password ? { password: options.password } : {})
}
this.client = createClient(config)
this.connect(index)
}
connect = (index: number) => {
this.client.connect()
this.client.on('error', err => {
winston.error('redis disconnected', err)
})
this.client
.select(index)
.then(() => {
winston.info(`connected to redis on ${index}`)
})
.catch(err => {
winston.error(`error connecting to redis index ${index}`, {
error: err
})
process.exit(1)
})
}
getExpire = (skipExpire?: boolean) =>
!skipExpire && this.expire ? { EX: this.expire } : {}
get = (key: string, callback: Callback): void => {
this.client
.get(key)
.then(reply => {
callback(reply || false)
})
.catch(() => {
callback(false)
})
}
set = (
key: string,
data: string,
callback: Callback,
skipExpire?: boolean | undefined
): void => {
this.client
.set(key, data, this.getExpire(skipExpire))
.then(() => {
callback(true)
})
.catch(() => {
callback(false)
})
}
}
export default RedisDocumentStore
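
A sketch of constructing the store with the options listed in the header comment (all values are placeholders; REDISTOGO_URL, when set, takes precedence over url, and url over host/port). The module path matches the test import further down:

import RedisDocumentStore from 'src/lib/document-stores/redis'
import { StoreNames } from 'src/types/store-names'

const store = new RedisDocumentStore({
  type: StoreNames.Redis,
  host: '127.0.0.1', // placeholder
  port: '6379', // placeholder
  db: 0,
  expire: 60 // seconds; omit to keep documents forever
})

store.set('greeting', 'hello', ok => {
  if (ok) store.get('greeting', value => console.log(value))
})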

View File

@ -0,0 +1,60 @@
import * as winston from 'winston'
import * as crypto from 'crypto'
import rethink = require('rethinkdbdash')
import type { RethinkDbStoreConfig } from 'src/types/config'
import type { Callback } from 'src/types/callback'
import { Store } from '.'
const md5 = (str: string) => {
const md5sum = crypto.createHash('md5')
md5sum.update(str)
return md5sum.digest('hex')
}
class RethinkDBStore extends Store {
client: rethink.RethinkClient
constructor(options: RethinkDbStoreConfig) {
super(options)
this.client = rethink({
silent: true,
host: options.host || '127.0.0.1',
port: options.port || 28015,
db: options.db || 'haste',
user: options.user || 'admin',
password: options.password || ''
})
}
set = (key: string, data: string, callback: Callback): void => {
this.client
.table('uploads')
.insert({ id: md5(key), data })
.run(error => {
if (error) {
callback(false)
winston.error('failed to insert to table', error)
return
}
callback(true)
})
}
get = (key: string, callback: Callback): void => {
this.client
.table('uploads')
.get(md5(key))
.run((error, result) => {
if (error || !result) {
callback(false)
if (error) winston.error('failed to get from table', error)
return
}
callback(result.data)
})
}
}
export default RethinkDBStore

27
src/lib/helpers/config.ts Normal file
View File

@ -0,0 +1,27 @@
import * as fs from 'fs'
import * as path from 'path'
import { Config } from 'src/types/config'
const getConfig = (): Config => {
const configPath =
process.argv.length <= 2 ? 'project-config.js' : process.argv[2]
const config = JSON.parse(
fs.readFileSync(path.join('config', configPath), 'utf8')
)
config.port = Number(process.env.PORT) || config.port || 7777
config.host = process.env.HOST || config.host || 'localhost'
if (!config.storage) {
config.storage = {}
}
if (!config.storage.type) {
config.storage.type = 'file'
}
return config
}
export default getConfig
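
A sketch of how the helper is consumed, with a hypothetical config file shown as a comment (all values are placeholders; note the helper JSON-parses the file, so it must contain plain JSON):

// Hypothetical config/project-config.js read by getConfig (plain JSON):
// {
//   "host": "0.0.0.0",
//   "port": 7777,
//   "storage": { "type": "file", "path": "./data" },
//   "documents": { "about": "./about.md" }
// }
import getConfig from 'src/lib/helpers/config'

const config = getConfig()
// PORT/HOST environment variables win over the file, then the defaults apply.
console.log(`${config.host}:${config.port}`, config.storage.type)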

View File

@ -0,0 +1,7 @@
import * as path from 'path'
export const getStaticDirectory = (baseDirectory: string) =>
path.join(baseDirectory, '..', 'static')
export const getStaticItemDirectory = (baseDirectory: string, item: string) =>
path.join(baseDirectory, '..', 'static', item)

24
src/lib/helpers/log.ts Normal file
View File

@ -0,0 +1,24 @@
import * as winston from 'winston'
import type { Config } from 'src/types/config'
import { Logging, LoggingType } from 'src/types/log'
const addLogging = (config: Config) => {
try {
winston.remove(winston.transports.Console)
} catch (e) {
/* was not present */
}
let detail: Logging
let type: LoggingType
for (let i = 0; i < config.logging.length; i += 1) {
detail = config.logging[i]
type = detail.type
const transport = winston.transports[type]
winston.add(transport, detail)
}
}
export default addLogging

View File

@ -0,0 +1,14 @@
import type { Config } from 'src/types/config'
import KeyGenerator from '.'
const build = async (config: Config): Promise<KeyGenerator> => {
const pwOptions = config.keyGenerator
pwOptions.type = pwOptions.type || 'random'
const Generator = (await import(`../key-generators/${pwOptions.type}`))
.default
const keyGenerator = new Generator(pwOptions)
return keyGenerator
}
export default build
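
A usage sketch for this builder; the import path is assumed to be src/lib/key-generators/builder, next to the generators it resolves, since the file name is not shown in this hunk:

import getConfig from 'src/lib/helpers/config'
// Assumed path; the hunk header does not show the file name.
import buildKeyGenerator from 'src/lib/key-generators/builder'

const main = async () => {
  const config = getConfig()
  // keyGenerator.type defaults to 'random' when the config leaves it out
  const keyGenerator = await buildKeyGenerator(config)
  console.log(keyGenerator.createKey(10))
}

main()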

View File

@ -0,0 +1,41 @@
import * as fs from 'fs'
import type { KeyGeneratorConfig } from 'src/types/config'
import KeyGenerator from '.'
class DictionaryGenerator extends KeyGenerator {
type: string
dictionary: string[]
constructor(options: KeyGeneratorConfig, readyCallback?: () => void) {
super(options)
if (!options) throw Error('No options passed to generator')
if (!options.path) throw Error('No dictionary path specified in options')
this.dictionary = []
this.type = options.type
// Load dictionary
fs.readFile(options.path, 'utf8', (err, data) => {
if (err) throw err
this.dictionary = data.split(/[\n\r]+/)
readyCallback?.()
})
}
// Generates a dictionary-based key, of keyLength words
createKey(keyLength: number): string {
let text = ''
for (let i = 0; i < keyLength; i += 1) {
const index = Math.floor(Math.random() * this.dictionary.length)
text += this.dictionary[index]
}
return text
}
}
export default DictionaryGenerator

View File

@ -0,0 +1,13 @@
import type { KeyGeneratorConfig } from 'src/types/config'
abstract class KeyGenerator {
type: string
constructor(options: KeyGeneratorConfig) {
this.type = options.type
}
abstract createKey(keyLength: number): string
}
export default KeyGenerator

View File

@ -0,0 +1,26 @@
// Draws inspiration from pwgen and http://tools.arantius.com/password
import KeyGenerator from '.'
const randOf = (collection: string) => () =>
collection[Math.floor(Math.random() * collection.length)]
// Helper methods to get a random vowel or consonant
const randVowel = randOf('aeiou')
const randConsonant = randOf('bcdfghjklmnpqrstvwxyz')
class PhoneticKeyGenerator extends KeyGenerator {
// Generate a phonetic key of alternating consonant & vowel
// eslint-disable-next-line class-methods-use-this
createKey(keyLength: number) {
let text = ''
const start = Math.round(Math.random())
for (let i = 0; i < keyLength; i += 1) {
text += i % 2 === start ? randConsonant() : randVowel()
}
return text
}
}
export default PhoneticKeyGenerator
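
For illustration, keys alternate consonants and vowels starting from a randomly chosen class; the printed values below are only examples of the pattern:

import PhoneticKeyGenerator from 'src/lib/key-generators/phonetic'

const gen = new PhoneticKeyGenerator({ type: 'phonetic' })
// e.g. 'tobuqa' or 'atenib' — never two vowels or two consonants in a row
console.log(gen.createKey(6))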

View File

@ -0,0 +1,28 @@
import type { KeyGeneratorConfig } from 'src/types/config'
import KeyGenerator from '.'
class RandomKeyGenerator extends KeyGenerator {
keyspace: string
// Initialize a new generator with the given keySpace
constructor(options: KeyGeneratorConfig) {
super(options)
this.keyspace =
options.keyspace ||
'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
}
// Generate a key of the given length
createKey(keyLength: number): string {
let text = ''
for (let i = 0; i < keyLength; i += 1) {
const index = Math.floor(Math.random() * this.keyspace.length)
text += this.keyspace.charAt(index)
}
return text
}
}
export default RandomKeyGenerator

142
src/server.ts Normal file
View File

@ -0,0 +1,142 @@
import express, { Express, Request } from 'express'
import * as fs from 'fs'
import * as winston from 'winston'
import uglify from 'uglify-js'
import connectSt from 'st'
import connectRateLimit from 'connect-ratelimit'
import { Config } from 'src/types/config'
import getConfig from 'src/lib/helpers/config'
import addLogging from 'src/lib/helpers/log'
import DocumentHandler from 'src/lib/document-handler'
import buildDocumenthandler from 'src/lib/document-handler/builder'
import {
getStaticDirectory,
getStaticItemDirectory
} from 'src/lib/helpers/directory'
const config: Config = getConfig()
if (config.logging) {
addLogging(config)
}
buildDocumenthandler(config)
.then((documentHandler: DocumentHandler) => {
// Compress the static javascript assets
if (config.recompressStaticAssets) {
const list = fs.readdirSync(getStaticDirectory(__dirname))
for (let j = 0; j < list.length; j += 1) {
const item = list[j]
if (
item.indexOf('.js') === item.length - 3 &&
item.indexOf('.min.js') === -1
) {
const dest = `${item.substring(
0,
item.length - 3
)}.min${item.substring(item.length - 3)}`
const origCode = fs.readFileSync(
getStaticItemDirectory(__dirname, item),
'utf8'
)
fs.writeFileSync(
getStaticItemDirectory(__dirname, dest),
uglify.minify(origCode).code,
'utf8'
)
winston.info(`compressed ${item} into ${dest}`)
}
}
}
// Send the static documents into the preferred store, skipping expirations
let documentPath: string
let data: string
Object.keys(config.documents).forEach(name => {
documentPath = config.documents[name]
data = fs.readFileSync(documentPath, 'utf8')
winston.info('loading static document', { name, path: documentPath })
if (data) {
documentHandler.store?.set(
name,
data,
cb => {
winston.debug('loaded static document', { success: cb })
},
true
)
} else {
winston.warn('failed to load static document', {
name,
path: documentPath
})
}
})
const app: Express = express()
// Rate limit all requests
if (config.rateLimits) {
config.rateLimits.end = true
app.use(connectRateLimit(config.rateLimits))
}
// get raw documents - support getting with extension
app.get('/raw/:id', async (request, response) =>
documentHandler.handleRawGet(request, response)
)
app.head('/raw/:id', (request, response) =>
documentHandler.handleRawGet(request, response)
)
// add documents
app.post('/documents', (request, response) =>
documentHandler.handlePost(request, response)
)
// get documents
app.get('/documents/:id', (request, response) =>
documentHandler.handleGet(request, response)
)
app.head('/documents/:id', (request, response) =>
documentHandler.handleGet(request, response)
)
// Otherwise, try to match static files
app.use(
connectSt({
path: getStaticDirectory(__dirname),
content: { maxAge: config.staticMaxAge },
passthrough: true,
index: false
})
)
// Then we can loop back - and everything else should be a token,
// so route it back to /
app.get('/:id', (request: Request, response, next) => {
request.sturl = '/'
next()
})
// And match index
app.use(
connectSt({
path: getStaticDirectory(__dirname),
content: { maxAge: config.staticMaxAge },
index: 'index.html'
})
)
app.listen(config.port, config.host, () => {
winston.info(`listening on ${config.host}:${config.port}`)
})
})
.catch(e => {
winston.error(`server couldn't start, an error occurred: ${e.message}`)
})
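
A sketch of exercising the routes above from a Node 18+ client. Host and port are placeholders, and the response shape assumes haste-server's usual { key } JSON body from POST /documents:

const base = 'http://localhost:7777' // placeholder

const roundTrip = async () => {
  const created = await fetch(`${base}/documents`, {
    method: 'POST',
    body: 'hello world'
  })
  const { key } = (await created.json()) as { key: string }
  const raw = await fetch(`${base}/raw/${key}`)
  console.log(await raw.text()) // 'hello world'
}

roundTrip()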

1
src/types/callback.ts Normal file
View File

@ -0,0 +1 @@
export type Callback = (data: boolean | string) => void

89
src/types/config.ts Normal file
View File

@ -0,0 +1,89 @@
import { Logging } from './log'
import { RateLimits } from './rate-limits'
import { StoreNames } from './store-names'
export interface Config {
host: string
port: number
keyLength: number
maxLength: number
staticMaxAge: number
recompressStaticAssets: boolean
logging: Logging[]
keyGenerator: KeyGeneratorConfig
rateLimits: RateLimits
storage: StoreConfig
documents: Record<string, string>
}
export type BaseStoreConfig = {
type: StoreNames
expire?: number
}
export interface MongoStoreConfig extends BaseStoreConfig {
connectionUrl: string
type: StoreNames.Mongo
}
export interface MemcachedStoreConfig extends BaseStoreConfig {
host: string
port: number
type: StoreNames.Memcached
}
export interface FileStoreConfig extends BaseStoreConfig {
path: string
type: StoreNames.File
}
export interface AmazonStoreConfig extends BaseStoreConfig {
bucket: string
region: string
type: StoreNames.AmazonS3
}
export interface PostgresStoreConfig extends BaseStoreConfig {
connectionUrl: string
type: StoreNames.Postgres
}
export interface RethinkDbStoreConfig extends BaseStoreConfig {
host: string
port: string
db: string
user: string
password: string
type: StoreNames.RethinkDb
}
export interface RedisStoreConfig extends BaseStoreConfig {
url?: string
db?: number
user?: string
username?: string | undefined
password?: string
host?: string
port?: string
type: StoreNames.Redis
}
export interface GoogleStoreConfig extends BaseStoreConfig {
type: StoreNames.GoogleDataStore
}
export type StoreConfig =
| MongoStoreConfig
| MemcachedStoreConfig
| FileStoreConfig
| AmazonStoreConfig
| PostgresStoreConfig
| RethinkDbStoreConfig
| RedisStoreConfig
| GoogleStoreConfig
export interface KeyGeneratorConfig {
type: string
keyspace?: string
path?: string
}
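
A config object satisfying this interface might look as follows; every value is a placeholder chosen only to typecheck against the types above:

import { Config } from 'src/types/config'
import { StoreNames } from 'src/types/store-names'

const exampleConfig: Config = {
  host: '0.0.0.0',
  port: 7777,
  keyLength: 10,
  maxLength: 400000,
  staticMaxAge: 86400,
  recompressStaticAssets: true,
  logging: [{ level: 'verbose', type: 'Console' }],
  keyGenerator: { type: 'phonetic' },
  rateLimits: { categories: { normal: { totalRequests: 500, every: 60000 } } },
  storage: { type: StoreNames.File, path: './data' },
  documents: { about: './about.md' }
}

export default exampleConfig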

15
src/types/document.ts Normal file
View File

@ -0,0 +1,15 @@
import { Store } from 'src/lib/document-stores'
import KeyGenerator from 'src/lib/key-generators'
import type { Config } from './config'
export type Document = {
store: Store
config: Config
keyGenerator: KeyGenerator
maxLength?: number
keyLength?: number
}
export interface Documents {
about: string
}

13
src/types/log.ts Normal file
View File

@ -0,0 +1,13 @@
export type LoggingType =
| 'File'
| 'Console'
| 'Loggly'
| 'DailyRotateFile'
| 'Http'
| 'Memory'
| 'Webhook'
export interface Logging {
level: string
type: LoggingType
}

13
src/types/rate-limits.ts Normal file
View File

@ -0,0 +1,13 @@
export interface Normal {
totalRequests: number
every: number
}
export interface Categories {
normal: Normal
}
export interface RateLimits {
end?: boolean
categories: Categories
}

11
src/types/store-names.ts Normal file
View File

@ -0,0 +1,11 @@
// eslint-disable-next-line import/prefer-default-export
export enum StoreNames {
AmazonS3 = 'amazon-s3',
File = 'file',
GoogleDataStore = 'google-datastore',
Memcached = 'memcached',
Mongo = 'mongo',
Postgres = 'postgres',
Redis = 'redis',
RethinkDb = 'rethinkdb'
}

View File

@ -0,0 +1,35 @@
import { createMock } from 'ts-auto-mock'
import DocumentHandler from 'src/lib/document-handler/index'
import Generator from 'src/lib/key-generators/random'
import constants from 'src/constants'
import { Config } from 'src/types/config'
import { Store } from 'src/lib/document-stores'
const store: Store = createMock<Store>()
const config: Config = createMock<Config>()
describe('document-handler', () => {
describe('with random key', () => {
it('should choose a key of the proper length', () => {
const gen = new Generator({ type: 'random' })
const dh = new DocumentHandler({
keyLength: 6,
keyGenerator: gen,
store,
config
})
expect(dh.acceptableKey()?.length).toEqual(6)
})
it('should choose a default key length', () => {
const gen = new Generator({ type: 'random' })
const dh = new DocumentHandler({
keyGenerator: gen,
maxLength: 1,
store,
config
})
expect(dh.keyLength).toEqual(constants.DEFAULT_KEY_LENGTH)
})
})
})

View File

@ -0,0 +1,53 @@
import RedisDocumentStore from 'src/lib/document-stores/redis'
import { StoreNames } from 'src/types/store-names'
describe('Redis document store', () => {
let store: RedisDocumentStore
/* reconnect to redis on each test */
afterEach(() => {
if (store) {
store.client?.quit()
}
})
describe('set', () => {
it('should be able to set a key and have an expiration set', async () => {
store = new RedisDocumentStore({
expire: 10,
type: StoreNames.Redis
})
return store.set('hello1', 'world', async () => {
const res = await store.client?.ttl('hello1')
expect(res).toBeGreaterThan(1)
})
})
it('should not set an expiration when told not to', async () => {
store = new RedisDocumentStore({
expire: 10,
type: StoreNames.Redis
})
store.set(
'hello2',
'world',
async () => {
const res = await store.client?.ttl('hello2')
expect(res).toEqual(-1)
},
true
)
})
it('should not set an expiration when expiration is off', async () => {
store = new RedisDocumentStore({
type: StoreNames.Redis
})
store.set('hello3', 'world', async () => {
const res = await store.client?.ttl('hello3')
expect(res).toEqual(-1)
})
})
})
})

View File

@ -1,26 +0,0 @@
/* global describe, it */
var assert = require('assert');
var DocumentHandler = require('../lib/document_handler');
var Generator = require('../lib/key_generators/random');
describe('document_handler', function() {
describe('randomKey', function() {
it('should choose a key of the proper length', function() {
var gen = new Generator();
var dh = new DocumentHandler({ keyLength: 6, keyGenerator: gen });
assert.equal(6, dh.acceptableKey().length);
});
it('should choose a default key length', function() {
var gen = new Generator();
var dh = new DocumentHandler({ keyGenerator: gen });
assert.equal(dh.keyLength, DocumentHandler.defaultKeyLength);
});
});
});

View File

@ -0,0 +1,24 @@
import Generator from 'src/lib/key-generators/dictionary'
jest.mock('fs', () => ({
readFile: jest
.fn()
.mockImplementation((_, a, callback) => callback(null, 'cat'))
}))
describe('DictionaryGenerator', () => {
describe('options', () => {
it('should throw an error if given no options or path', () => {
expect(() => new Generator({ type: '' })).toThrow()
})
})
describe('generation', () => {
it('should return a key of the proper number of words from the given dictionary', () => {
const path = '/tmp/haste-server-test-dictionary'
const gen = new Generator({ path, type: '' })
expect(gen.createKey(3)).toEqual('catcatcat')
})
})
})

View File

@ -0,0 +1,30 @@
/* eslint-disable jest/no-conditional-expect */
import Generator from 'src/lib/key-generators/phonetic'
const vowels = 'aeiou'
const consonants = 'bcdfghjklmnpqrstvwxyz'
describe('PhoneticKeyGenerator', () => {
describe('generation', () => {
it('should return a key of the proper length', () => {
const gen = new Generator({ type: 'phonetic' })
expect(gen.createKey(6).length).toEqual(6)
})
it('should alternate consonants and vowels', () => {
const gen = new Generator({ type: 'phonetic' })
const key = gen.createKey(3)
// if it starts with a consonant, we expect cvc
// if it starts with a vowel, we expect vcv
if (consonants.includes(key[0])) {
expect(consonants.includes(key[0])).toBeTruthy()
expect(consonants.includes(key[2])).toBeTruthy()
expect(vowels.includes(key[1])).toBeTruthy()
} else {
expect(vowels.includes(key[0])).toBeTruthy()
expect(vowels.includes(key[2])).toBeTruthy()
expect(consonants.includes(key[1])).toBeTruthy()
}
})
})
})

View File

@ -0,0 +1,20 @@
import Generator from 'src/lib/key-generators/random'
describe('RandomKeyGenerator', () => {
describe('generation', () => {
it('should return a key of the proper length', () => {
const gen = new Generator({ type: 'random' })
expect(gen.createKey(6).length).toEqual(6)
})
it('should use a key from the given keyset if given', () => {
const gen = new Generator({ type: 'random', keyspace: 'A' })
expect(gen.createKey(6)).toEqual('AAAAAA')
})
it('should not use a key from the given keyset if not given', () => {
const gen = new Generator({ type: 'random', keyspace: 'A' })
expect(gen.createKey(6).includes('B')).toBeFalsy()
})
})
})

View File

@ -1,34 +0,0 @@
/* global describe, it */
const assert = require('assert');
const fs = require('fs');
const Generator = require('../../lib/key_generators/dictionary');
describe('DictionaryGenerator', function() {
describe('options', function() {
it('should throw an error if given no options', () => {
assert.throws(() => {
new Generator();
}, Error);
});
it('should throw an error if given no path', () => {
assert.throws(() => {
new Generator({});
}, Error);
});
});
describe('generation', function() {
it('should return a key of the proper number of words from the given dictionary', () => {
const path = '/tmp/haste-server-test-dictionary';
const words = ['cat'];
fs.writeFileSync(path, words.join('\n'));
const gen = new Generator({path}, () => {
assert.equal('catcatcat', gen.createKey(3));
});
});
});
});

View File

@ -1,35 +0,0 @@
/* global describe, it */
const assert = require('assert');
const Generator = require('../../lib/key_generators/phonetic');
const vowels = 'aeiou';
const consonants = 'bcdfghjklmnpqrstvwxyz';
describe('PhoneticKeyGenerator', () => {
describe('generation', () => {
it('should return a key of the proper length', () => {
const gen = new Generator();
assert.equal(6, gen.createKey(6).length);
});
it('should alternate consonants and vowels', () => {
const gen = new Generator();
const key = gen.createKey(3);
// if it starts with a consonant, we expect cvc
// if it starts with a vowel, we expect vcv
if(consonants.includes(key[0])) {
assert.ok(consonants.includes(key[0]));
assert.ok(consonants.includes(key[2]));
assert.ok(vowels.includes(key[1]));
} else {
assert.ok(vowels.includes(key[0]));
assert.ok(vowels.includes(key[2]));
assert.ok(consonants.includes(key[1]));
}
});
});
});

View File

@ -1,24 +0,0 @@
/* global describe, it */
const assert = require('assert');
const Generator = require('../../lib/key_generators/random');
describe('RandomKeyGenerator', () => {
describe('generation', () => {
it('should return a key of the proper length', () => {
const gen = new Generator();
assert.equal(gen.createKey(6).length, 6);
});
it('should use a key from the given keyset if given', () => {
const gen = new Generator({keyspace: 'A'});
assert.equal(gen.createKey(6), 'AAAAAA');
});
it('should not use a key from the given keyset if not given', () => {
const gen = new Generator({keyspace: 'A'});
assert.ok(!gen.createKey(6).includes('B'));
});
});
});

View File

@ -1,54 +0,0 @@
/* global it, describe, afterEach */
var assert = require('assert');
var winston = require('winston');
winston.remove(winston.transports.Console);
var RedisDocumentStore = require('../lib/document_stores/redis');
describe('redis_document_store', function() {
/* reconnect to redis on each test */
afterEach(function() {
if (RedisDocumentStore.client) {
RedisDocumentStore.client.quit();
RedisDocumentStore.client = false;
}
});
describe('set', function() {
it('should be able to set a key and have an expiration set', function(done) {
var store = new RedisDocumentStore({ expire: 10 });
store.set('hello1', 'world', function() {
RedisDocumentStore.client.ttl('hello1', function(err, res) {
assert.ok(res > 1);
done();
});
});
});
it('should not set an expiration when told not to', function(done) {
var store = new RedisDocumentStore({ expire: 10 });
store.set('hello2', 'world', function() {
RedisDocumentStore.client.ttl('hello2', function(err, res) {
assert.equal(-1, res);
done();
});
}, true);
});
it('should not set an expiration when expiration is off', function(done) {
var store = new RedisDocumentStore({ expire: false });
store.set('hello3', 'world', function() {
RedisDocumentStore.client.ttl('hello3', function(err, res) {
assert.equal(-1, res);
done();
});
});
});
});
});

38
tsconfig.json Normal file
View File

@ -0,0 +1,38 @@
{
"ts-node": {
"files": true
},
"files": ["src/global.d.ts"],
"compilerOptions": {
"allowJs": true,
"composite": false,
"declaration": true,
"declarationMap": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"incremental": true,
"inlineSources": false,
"isolatedModules": true,
"lib": ["ES2021", "DOM", "DOM.Iterable"],
"moduleResolution": "node",
"noUnusedLocals": false,
"noUnusedParameters": false,
"preserveWatchOutput": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"strict": true,
"typeRoots": ["node_modules/@types", "src/global.d.ts"],
"target": "es2021",
"noEmit": false,
"module": "commonjs",
"sourceMap": true,
"outDir": "dist",
"baseUrl": ".",
"paths": {
"*": ["node_modules/*"],
"src/*": ["./src/*"]
}
},
"include": ["src", "**/*.ts"],
"exclude": ["node_modules"]
}

6084
yarn.lock Normal file

File diff suppressed because it is too large