diff --git a/VERSION b/VERSION index 341cf11f..9325c3cc 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -0.2.0 \ No newline at end of file +0.3.0 \ No newline at end of file diff --git a/WORKLOG.md b/WORKLOG.md index 10c36631..bb9934a4 100644 --- a/WORKLOG.md +++ b/WORKLOG.md @@ -6,7 +6,9 @@ * error log * account lookup w/ pw reset - * nice to have + * treats + * constructs jiggle when clicked + * background colour changes depending on time of day * bot game grind @@ -35,6 +37,8 @@ * fuck magic * empower on ko +* rework vecs into sets + *$$$* * chatwheel * eth adapter diff --git a/acp/package.json b/acp/package.json index a914b2e5..ef344ba8 100644 --- a/acp/package.json +++ b/acp/package.json @@ -1,6 +1,6 @@ { "name": "mnml-client", - "version": "0.2.0", + "version": "0.3.0", "description": "", "main": "index.js", "scripts": { diff --git a/bin/client.sh b/bin/client.sh index a2407748..922b77d3 100755 --- a/bin/client.sh +++ b/bin/client.sh @@ -18,3 +18,4 @@ cd $MNML_PATH/acp rm -rf dist npm i npm run build +mv dist/* $MNML_PATH/client/dist/ diff --git a/bin/db.sh b/bin/db.sh new file mode 100755 index 00000000..15faafe8 --- /dev/null +++ b/bin/db.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +# POSTGRES SETUP +sudo -u postgres createdb mnml +sudo -u postgres createuser --encrypted mnml + +PG_PASSWORD=$(openssl rand -hex 16) +echo "database password is $PG_PASSWORD" +sudo -u -E postgres psql -c "alter user mnml with encrypted password '$PG_PASSWORD';" diff --git a/bin/deploy.sh b/bin/deploy.sh index 94a2bd26..15778cd7 100755 --- a/bin/deploy.sh +++ b/bin/deploy.sh @@ -8,30 +8,25 @@ VERSION=$(<"$MNML_PATH/VERSION") SERVER_BIN_DIR="/usr/local/mnml/bin" CLIENT_DIST_DIR="/var/lib/mnml/client" -CLIENT_PUBLIC_DIR="/var/lib/mnml/public/client" +CLIENT_PUBLIC_DIR="/var/lib/mnml/public/current" -ACP_DIST_DIR="/var/lib/mnml/acp" -ACP_PUBLIC_DIR="/var/lib/mnml/public/acp" +TARGET=$1 + +echo "syncing server $VERSION to $TARGET" # server updates echo "syncing server $VERSION " -rsync -a --delete --delete-excluded "$MNML_PATH/server/target/release/mnml" mnml:"$SERVER_BIN_DIR/$VERSION" -ssh -q mnml ln -nfs "$SERVER_BIN_DIR/$VERSION" "$SERVER_BIN_DIR/mnml" -ssh -q mnml ls -lah "$SERVER_BIN_DIR" +rsync -a --delete --delete-excluded "$MNML_PATH/server/target/release/mnml" "$TARGET:$SERVER_BIN_DIR/$VERSION" +ssh -q "$TARGET" ln -nfs "$SERVER_BIN_DIR/$VERSION" "$SERVER_BIN_DIR/mnml" +ssh -q "$TARGET" ls -lah "$SERVER_BIN_DIR" # client updates echo "syncing client $VERSION" -rsync -a --delete --delete-excluded "$MNML_PATH/client/dist/" mnml:"$CLIENT_DIST_DIR/$VERSION/" -ssh -q mnml ln -nfs "$CLIENT_DIST_DIR/$VERSION" "$CLIENT_PUBLIC_DIR" - -# acp updates -echo "syncing acp $VERSION" -rsync -a --delete --delete-excluded "$MNML_PATH/acp/dist/" mnml:"$ACP_DIST_DIR/$VERSION/" -ssh -q mnml ln -nfs "$ACP_DIST_DIR/$VERSION" "$ACP_PUBLIC_DIR" -ssh -q mnml ls -lah "/var/lib/mnml/public" +rsync -a --delete --delete-excluded "$MNML_PATH/client/dist/" "$TARGET:$CLIENT_DIST_DIR/$VERSION/" +ssh -q "$TARGET" ln -nfs "$CLIENT_DIST_DIR/$VERSION" "$CLIENT_PUBLIC_DIR" echo "restarting mnml service" -ssh -q -t mnml sudo service mnml restart && sleep 1 && systemctl --no-pager status mnml +ssh -q -t "$TARGET" sudo service mnml restart && sleep 1 && systemctl --no-pager status mnml echo "restarting nginx service" -ssh -q -t mnml sudo service nginx restart && sleep 1 && systemctl --no-pager status nginx +ssh -q -t "$TARGET" sudo service nginx restart && sleep 1 && systemctl --no-pager status nginx diff --git a/bin/install.sh b/bin/install.sh index 
e4b48bbe..a43bfae4 100755 --- a/bin/install.sh +++ b/bin/install.sh @@ -19,6 +19,8 @@ fi source $MNML_CONF +# sudo certbot certonly --nginx -d mnml.gg -d acp.mnml.gg + # /var/lib/mnml # contains img data, builds sudo mkdir -p /var/lib/mnml/client @@ -44,7 +46,7 @@ sudo cp $MNML_PATH/etc/systemd/system/mnml.service /usr/local/systemd/system/ sudo -u postgres createdb mnml sudo -u postgres createuser --encrypted mnml -echo "DATABASE_URL=postgres://mnml:$MNML_PG_PASSWORD@$MNML_PG_HOST/mnml" | sudo tee -a /etc/mnml/server.conf +echo "DATABASE_URL=postgres://mnml:$MNML_PG_PASSWORD@$MNML_PG_HOST/mnml" | sudo tee -a /etc/mnml/gs.conf sudo -u postgres psql -c "alter user mnml with encrypted password '$MNML_PG_PASSWORD';" cd $MNML_PATH/ops && npm run migrate diff --git a/client/assets/styles/styles.less b/client/assets/styles/styles.less index 76fe12bf..f234fcd0 100644 --- a/client/assets/styles/styles.less +++ b/client/assets/styles/styles.less @@ -74,6 +74,10 @@ figure { text-align: center; } +p { + margin-bottom: 1em; +} + #mnml { display: grid; grid-template-columns: minmax(min-content, 1fr) 8fr 1fr; @@ -189,11 +193,28 @@ button[disabled] { LOGIN */ -.login { - width: 50%; - display: flex; - flex-flow: column; - margin-bottom: 2em; +.welcome { + .highlight { + color: black; + background: @white; + border: 1px solid @white; + } + + .login { + width: 50%; + display: flex; + flex-flow: column; + margin-bottom: 2em; + } + + .options { + width: 50%; + display: flex; + flex-flow: row; + button { + flex: 1; + } + } h2 { margin-bottom: 0.5em; diff --git a/client/assets/styles/styles.mobile.css b/client/assets/styles/styles.mobile.css index 263761ff..c0db0c18 100644 --- a/client/assets/styles/styles.mobile.css +++ b/client/assets/styles/styles.mobile.css @@ -53,10 +53,19 @@ padding: 0 0.5em; } - .login { + .welcome .login { width: 100%; } + .welcome .options { + width: 100%; + flex-flow: row wrap; + } + + .welcome .options button { + flex: 1 0 50%; + } + .timer-container { margin: 0.5em 0 0 0; } diff --git a/client/index.html b/client/index.html index 88515382..453c4706 100644 --- a/client/index.html +++ b/client/index.html @@ -16,7 +16,10 @@ - + diff --git a/client/package.json b/client/package.json index a0c13d05..96196e8e 100644 --- a/client/package.json +++ b/client/package.json @@ -1,6 +1,6 @@ { "name": "mnml-client", - "version": "0.2.0", + "version": "0.3.0", "description": "", "main": "index.js", "scripts": { diff --git a/client/src/actions.jsx b/client/src/actions.jsx index 149dcb2a..1fde403a 100644 --- a/client/src/actions.jsx +++ b/client/src/actions.jsx @@ -16,6 +16,7 @@ export const setConstructs = value => ({ type: 'SET_CONSTRUCTS', value }); export const setConstructRename = value => ({ type: 'SET_CONSTRUCT_RENAME', value }); export const setGame = value => ({ type: 'SET_GAME', value }); export const setInfo = value => ({ type: 'SET_INFO', value }); +export const setEmail = value => ({ type: 'SET_EMAIL', value }); export const setInstance = value => ({ type: 'SET_INSTANCE', value }); export const setInstances = value => ({ type: 'SET_INSTANCES', value }); export const setItemEquip = value => ({ type: 'SET_ITEM_EQUIP', value }); diff --git a/client/src/components/account.management.jsx b/client/src/components/account.management.jsx index 3561094e..05a8b04b 100644 --- a/client/src/components/account.management.jsx +++ b/client/src/components/account.management.jsx @@ -12,6 +12,7 @@ const addState = connect( function receiveState(state) { const { account, + email, ping, ws, } = state; @@ 
-21,13 +22,24 @@ const addState = connect( postData('/account/password', { current, password }) .then(res => res.json()) .then(data => { - if (!data.success) return errorToast(data.error_message); + if (data.error) return errorToast(data.error); infoToast('Password changed. Reloading...') setTimeout(() => window.location.reload(), 5000); }) .catch(error => errorToast(error)); } + function setEmail(email) { + postData('/account/email', { email }) + .then(res => res.json()) + .then(data => { + if (data.error) return errorToast(data.error); + infoToast('Email set. Please confirm your address.'); + return true; + }) + .catch(error => errorToast(error)); + } + function logout() { postData('/account/logout').then(() => window.location.reload(true)); } @@ -40,8 +52,10 @@ const addState = connect( return { account, ping, + email, logout, setPassword, + setEmail, sendConstructSpawn, }; }, @@ -54,6 +68,7 @@ class AccountStatus extends Component { this.state = { setPassword: { current: '', password: '', confirm: ''}, + email: null, }; } @@ -61,8 +76,10 @@ class AccountStatus extends Component { const { account, ping, + email, logout, setPassword, + setEmail, sendConstructSpawn, } = args; @@ -84,24 +101,26 @@ class AccountStatus extends Component {
Subscription
{account.subscribed ? 'some date' : 'unsubscribed'}
- +
- +
-
Current Email
-
{account.email ? account.email : 'No email set'}
+
Recovery Email
+
{email ? email.email : 'No email set'}
Status
-
{account.email_confirmed ? 'Confirmed' : 'Unconfirmed'}
+
{email && email.confirmed ? 'Confirmed' : 'Unconfirmed'}
- +
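
Note on the response shape assumed above: the account management handlers, like the welcome components later in this diff, branch on data.error and data.message in the JSON bodies returned by the /account endpoints, while the server side builds those bodies with Json::Message(...) and Json::Error(...). The Json enum itself is not part of this diff, so the following is only a sketch of one definition that would produce the shapes the client checks for, assuming serde's externally tagged enum representation with lowercased variant names.

// Sketch only: the real Json enum lives in the server's http module and is
// not shown in this diff; this is an assumed definition, not the actual one.
#[derive(Serialize)]
#[serde(rename_all = "lowercase")]
pub enum Json {
    Message(String),
    Error(String),
}

// serde_json::to_string(&Json::Message("email set".into()))   => {"message":"email set"}
// serde_json::to_string(&Json::Error("invalid token".into())) => {"error":"invalid token"}
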
diff --git a/client/src/components/inventory.jsx b/client/src/components/inventory.jsx index 4bb2b0a2..37cf55f1 100644 --- a/client/src/components/inventory.jsx +++ b/client/src/components/inventory.jsx @@ -52,14 +52,14 @@ function Inventory(args) { const useMtx = (item, i) => (
setMtxActive(item)} >
{item}
- +
); const availableMtx = (item, i) => (
mtxBuy(item)} >
{item.variant}
- +
); diff --git a/client/src/components/login.jsx b/client/src/components/login.jsx deleted file mode 100644 index 6b7eefd3..00000000 --- a/client/src/components/login.jsx +++ /dev/null @@ -1,170 +0,0 @@ -// eslint-disable-next-line -const preact = require('preact'); -const { Component } = require('preact') -const { connect } = require('preact-redux'); -const linkState = require('linkstate').default; - -const { postData, errorToast } = require('../utils'); - -const addState = connect( - (state) => { - const { - ws - } = state; - function submitLogin(name, password) { - postData('/account/login', { name, password }) - .then(res => res.json()) - .then(data => { - if (data.error) return errorToast(data.error); - console.log(data.response); - ws.connect(); - }) - .catch(error => errorToast(error)); - } - - function submitRegister(name, password, code) { - postData('/account/register', { name, password, code }) - .then(res => res.json()) - .then(data => { - if (data.error) return errorToast(data.error); - console.log(data.response); - ws.connect(); - }) - .catch(error => errorToast(error)); - } - - return { - submitLogin, - submitRegister, - } - }, -); - -class Login extends Component { - constructor(props) { - super(props); - - this.state = { - login: { name: '', password: '', code: ''}, - register: { name: '', password: '', confirm: '', code: ''}, - }; - - this.loginSubmit = this.loginSubmit.bind(this); - this.registerSubmit = this.registerSubmit.bind(this); - } - - loginSubmit(event) { - event.preventDefault(); - console.log(this.state); - this.props.submitLogin(this.state.login.name, this.state.login.password); - this.setState({ login: { name: '', password: '' }}); - } - - registerSubmit(event) { - event.preventDefault(); - this.props.submitRegister(this.state.register.name, this.state.register.password, this.state.register.code); - this.setState({ register: { name: '', password: '', confirm: '', code: ''}}); - } - - render() { - const registerConfirm = () => - this.state.register.password === this.state.register.confirm; - - const loginDisabled = () => { - const { password, name } = this.state.login; - return !(password && name); - } - - const registerDisabled = () => { - const { password, name, code } = this.state.register; - return !(registerConfirm() && password && name && code); - } - - return ( -
-

mnml.gg

- - - -
- ); - } -} - -module.exports = addState(Login); diff --git a/client/src/components/main.jsx b/client/src/components/main.jsx index 44948fe2..57abab03 100644 --- a/client/src/components/main.jsx +++ b/client/src/components/main.jsx @@ -2,7 +2,7 @@ const preact = require('preact'); const { connect } = require('preact-redux'); -const Login = require('./login'); +const Welcome = require('./welcome'); const Game = require('./game'); const Instance = require('./instance.component'); const Team = require('./team'); @@ -25,7 +25,7 @@ function Main(props) { } = props; if (!account) { - return ; + return ; } if (game) { diff --git a/client/src/components/welcome.about.jsx b/client/src/components/welcome.about.jsx new file mode 100644 index 00000000..0f33b942 --- /dev/null +++ b/client/src/components/welcome.about.jsx @@ -0,0 +1,69 @@ +// eslint-disable-next-line +const preact = require('preact'); +const { Component } = require('preact') +const { connect } = require('preact-redux'); +const linkState = require('linkstate').default; + +const { postData, errorToast, infoToast } = require('../utils'); + +const addState = connect( + (state) => { + const { + ws + } = state; + + function submitRegister(name, password, code) { + postData('/account/register', { name, password, code }) + .then(res => res.json()) + .then(data => { + if (data.error) return errorToast(data.error); + infoToast(data.message); + ws.connect(); + }) + .catch(error => errorToast(error)); + } + + return { + submitRegister, + } + }, +); + +function Register(args) { + const { + submitRegister, + navRegister, + } = args; + + return ( + + ); +} + +module.exports = addState(Register); diff --git a/client/src/components/welcome.help.jsx b/client/src/components/welcome.help.jsx new file mode 100644 index 00000000..b864e6a0 --- /dev/null +++ b/client/src/components/welcome.help.jsx @@ -0,0 +1,91 @@ +// eslint-disable-next-line +const preact = require('preact'); +const { Component } = require('preact') +const { connect } = require('preact-redux'); +const linkState = require('linkstate').default; + +const { postData, errorToast, infoToast } = require('../utils'); + +const addState = connect( + (state) => { + const { + ws + } = state; + + function submitRecover(email) { + postData('/account/recover', { email }) + .then(res => res.json()) + .then(data => { + if (data.error) return errorToast(data.error); + infoToast(data.message); + }) + .catch(error => errorToast(error)); + } + + return { + submitRecover, + } + }, +); + +const EMAIL_SUBJECT = name => ` +account support: ${name || 'CHANGEME'} +`; + +const EMAIL_BODY = ` +--- +include some details regarding your account. ie. 
+- account name +- construct names +--- +`; + +function Register(args) { + const { + submitRecover, + } = args; + + const { email } = this.state; + + const buttonSubmit = (event) => { + event.preventDefault(); + submitRecover(email); + // this.setState({ email: '' }); + }; + + const buttonDisabled = () => { + return !email; + }; + + const supportLink = encodeURI(`mailto:humans@mnml.gg?subject=${EMAIL_SUBJECT(email)}&body=${EMAIL_BODY}`); + + return ( + + ); +} + +module.exports = addState(Register); diff --git a/client/src/components/welcome.jsx b/client/src/components/welcome.jsx new file mode 100644 index 00000000..27575d98 --- /dev/null +++ b/client/src/components/welcome.jsx @@ -0,0 +1,61 @@ +// eslint-disable-next-line +const preact = require('preact'); + +const Login = require('./welcome.login'); +const Register = require('./welcome.register'); +const Help = require('./welcome.help'); +const About = require('./welcome.about'); + +function Welcome() { + const page = this.state.page || 'login'; + + const navRegister = () => this.setState({ page: 'register' }); + const pageEl = () => { + if (page === 'login') return ; + if (page === 'register') return ; + if (page === 'about') return ; + if (page === 'help') return ; + return false; + }; + + return ( +
+

mnml.gg

+ +
+ + + + +
+ {pageEl()} +
+ ); +} + +module.exports = Welcome; diff --git a/client/src/components/welcome.login.jsx b/client/src/components/welcome.login.jsx new file mode 100644 index 00000000..aa65f20d --- /dev/null +++ b/client/src/components/welcome.login.jsx @@ -0,0 +1,80 @@ +// eslint-disable-next-line +const preact = require('preact'); +const { Component } = require('preact') +const { connect } = require('preact-redux'); +const linkState = require('linkstate').default; + +const { postData, errorToast } = require('../utils'); + +const addState = connect( + (state) => { + const { + ws + } = state; + function submitLogin(name, password) { + postData('/account/login', { name, password }) + .then(res => res.json()) + .then(data => { + if (data.error) return errorToast(data.error); + console.log(data.message); + ws.connect(); + }) + .catch(error => errorToast(error)); + } + + return { + submitLogin, + } + }, +); + + +function Login(args) { + const { + submitLogin, + } = args; + + const { password, name } = this.state; + + const loginSubmit = (event) => { + event.preventDefault(); + submitLogin(name, password); + this.setState({ name: '', password: '' }); + }; + + const loginDisabled = () => { + return !(password && name); + }; + + return ( + + ); +} + +module.exports = addState(Login); diff --git a/client/src/components/welcome.register.jsx b/client/src/components/welcome.register.jsx new file mode 100644 index 00000000..7051a8ce --- /dev/null +++ b/client/src/components/welcome.register.jsx @@ -0,0 +1,96 @@ +// eslint-disable-next-line +const preact = require('preact'); +const { Component } = require('preact') +const { connect } = require('preact-redux'); +const linkState = require('linkstate').default; + +const { postData, errorToast, infoToast } = require('../utils'); + +const addState = connect( + (state) => { + const { + ws + } = state; + + function submitRegister(name, password, code) { + postData('/account/register', { name, password, code }) + .then(res => res.json()) + .then(data => { + if (data.error) return errorToast(data.error); + infoToast(data.message); + ws.connect(); + }) + .catch(error => errorToast(error)); + } + + return { + submitRegister, + } + }, +); + +function Register(args) { + const { + submitRegister, + } = args; + + const { password, confirm, name, code } = this.state; + + const registerSubmit = (event) => { + event.preventDefault(); + submitRegister(name, password, code); + // this.setState({ name: '', password: '', confirm: '', code: ''}); + } + + const registerConfirm = () => + password === confirm; + + const registerDisabled = () => { + return !(registerConfirm() && password && name && code); + } + + return ( + + ); +} + +module.exports = addState(Register); diff --git a/client/src/events.jsx b/client/src/events.jsx index da487ef8..15f9f35b 100644 --- a/client/src/events.jsx +++ b/client/src/events.jsx @@ -101,6 +101,10 @@ function registerEvents(store) { store.dispatch(actions.setAccount(account)); } + function setEmail(email) { + store.dispatch(actions.setEmail(email)); + } + function setShop(v) { store.dispatch(actions.setShop(v)); } @@ -212,6 +216,7 @@ function registerEvents(store) { setConstructList, setNewConstruct, setGame, + setEmail, setInstance, setItemInfo, setPing, diff --git a/client/src/reducers.jsx b/client/src/reducers.jsx index f816befd..8c9beb5b 100644 --- a/client/src/reducers.jsx +++ b/client/src/reducers.jsx @@ -28,6 +28,7 @@ module.exports = { constructEditId: createReducer(null, 'SET_CONSTRUCT_EDIT_ID'), constructRename: createReducer(null, 
'SET_CONSTRUCT_RENAME'), game: createReducer(null, 'SET_GAME'), + email: createReducer(null, 'SET_EMAIL'), info: createReducer(null, 'SET_INFO'), instance: createReducer(null, 'SET_INSTANCE'), instances: createReducer([], 'SET_INSTANCES'), diff --git a/client/src/socket.jsx b/client/src/socket.jsx index 78ebd771..4fe78845 100644 --- a/client/src/socket.jsx +++ b/client/src/socket.jsx @@ -1,7 +1,11 @@ const toast = require('izitoast'); const cbor = require('borc'); -const SOCKET_URL = process.env.NODE_ENV === 'production' ? 'wss://mnml.gg/api/ws' : 'ws://localhost/api/ws'; +const socketUrl = () => { + if (process.env.NODE_ENV === 'production') return 'wss://mnml.gg/api/ws'; + if (process.env.NODE_ENV === 'staging') return 'wss://sixtysix.pro/api/ws'; + return 'ws://localhost/api/ws'; +}; function errorToast(err) { console.error(err); @@ -146,6 +150,10 @@ function createSocket(events) { events.setShop(shop); } + function onEmailState(v) { + events.setEmail(v); + } + function onAccountInstances(list) { events.setAccountInstances(list); } @@ -194,6 +202,7 @@ function createSocket(events) { AccountShop: onAccountShop, ConstructSpawn: onConstructSpawn, GameState: onGameState, + EmailState: onEmailState, InstanceState: onInstanceState, ItemInfo: onItemInfo, Pong: onPong, @@ -271,7 +280,7 @@ function createSocket(events) { ws = null; } - ws = new WebSocket(SOCKET_URL); + ws = new WebSocket(socketUrl()); ws.binaryType = 'arraybuffer'; // Listen for messages diff --git a/etc/mnml/gs.SAMPLE.conf b/etc/mnml/gs.SAMPLE.conf new file mode 100644 index 00000000..20435945 --- /dev/null +++ b/etc/mnml/gs.SAMPLE.conf @@ -0,0 +1,5 @@ +DATABASE_URL=postgres://mnml:password@somewhere/mnml + +MAIL_ADDRESS=machines@mnml.gg +MAIL_DOMAIN=vinyl.mnml.gg +MAIL_PASSWORD=mmmmmmmmmmmmmmmm diff --git a/etc/nginx/sites-available/mnml.gg.PRODUCTION.nginx.conf b/etc/nginx/sites-available/mnml.gg.PRODUCTION.nginx.conf index 875a10d7..cce73707 100644 --- a/etc/nginx/sites-available/mnml.gg.PRODUCTION.nginx.conf +++ b/etc/nginx/sites-available/mnml.gg.PRODUCTION.nginx.conf @@ -14,6 +14,8 @@ map $http_upgrade $connection_upgrade { # DEV server { + server_name mnml.gg; + location / { root /var/lib/mnml/public/current; index index.html; @@ -46,6 +48,23 @@ server { ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot } +server { + server_name acp.mnml.gg; + + location / { + root /var/lib/mnml/public/current/; + index acp.html; + try_files $uri $uri/ acp.html; + } + + listen 443 ssl; # managed by Certbot + ssl_certificate /etc/letsencrypt/live/mnml.gg/fullchain.pem; # managed by Certbot + ssl_certificate_key /etc/letsencrypt/live/mnml.gg/privkey.pem; # managed by Certbot + include /etc/letsencrypt/options-ssl-nginx.conf; # managed by Certbot + ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem; # managed by Certbot +} + + # http -> https server { server_name mnml.gg; @@ -56,8 +75,3 @@ server { server_name minimal.gg; return 301 https://mnml.gg$request_uri; } - -server { - server_name cryps.gg; - return 301 https://mnml.gg$request_uri; -} diff --git a/etc/systemd/system/mnml.service b/etc/systemd/system/mnml.service index fd859f8d..1fb13097 100644 --- a/etc/systemd/system/mnml.service +++ b/etc/systemd/system/mnml.service @@ -1,6 +1,5 @@ [Unit] Description=mnml game server -After=postgresql User=mnml [Service] diff --git a/etc/telegraf/telegraf.conf b/etc/telegraf/telegraf.conf new file mode 100644 index 00000000..9d994fd3 --- /dev/null +++ b/etc/telegraf/telegraf.conf @@ -0,0 +1,305 @@ +# Telegraf Configuration +# +# 
Telegraf is entirely plugin driven. All metrics are gathered from the +# declared inputs, and sent to the declared outputs. +# +# Plugins must be declared in here to be active. +# To deactivate a plugin, comment out the name and any variables. +# +# Use 'telegraf -config telegraf.conf -test' to see what metrics a config +# file would generate. +# +# Environment variables can be used anywhere in this config file, simply surround +# them with ${}. For strings the variable must be within quotes (ie, "${STR_VAR}"), +# for numbers and booleans they should be plain (ie, ${INT_VAR}, ${BOOL_VAR}) + + +# Global tags can be specified here in key="value" format. +[global_tags] + # dc = "us-east-1" # will tag all metrics with dc=us-east-1 + # rack = "1a" + ## Environment variables can be used as tags, and throughout the config file + # user = "$USER" + + +# Configuration for telegraf agent +[agent] + ## Default data collection interval for all inputs + interval = "10s" + ## Rounds collection interval to 'interval' + ## ie, if interval="10s" then always collect on :00, :10, :20, etc. + round_interval = true + + ## Telegraf will send metrics to outputs in batches of at most + ## metric_batch_size metrics. + ## This controls the size of writes that Telegraf sends to output plugins. + metric_batch_size = 1000 + + ## Maximum number of unwritten metrics per output. + metric_buffer_limit = 10000 + + ## Collection jitter is used to jitter the collection by a random amount. + ## Each plugin will sleep for a random time within jitter before collecting. + ## This can be used to avoid many plugins querying things like sysfs at the + ## same time, which can have a measurable effect on the system. + collection_jitter = "0s" + + ## Default flushing interval for all outputs. Maximum flush_interval will be + ## flush_interval + flush_jitter + flush_interval = "10s" + ## Jitter the flush interval by a random amount. This is primarily to avoid + ## large write spikes for users running a large number of telegraf instances. + ## ie, a jitter of 5s and interval 10s means flushes will happen every 10-15s + flush_jitter = "0s" + + ## By default or when set to "0s", precision will be set to the same + ## timestamp order as the collection interval, with the maximum being 1s. + ## ie, when interval = "10s", precision will be "1s" + ## when interval = "250ms", precision will be "1ms" + ## Precision will NOT be used for service inputs. It is up to each individual + ## service input to set the timestamp at the appropriate precision. + ## Valid time units are "ns", "us" (or "µs"), "ms", "s". + precision = "" + + ## Log at debug level. + # debug = false + ## Log only error level messages. + # quiet = false + + ## Log file name, the empty string means to log to stderr. + # logfile = "" + + ## The logfile will be rotated after the time interval specified. When set + ## to 0 no time based rotation is performed. Logs are rotated only when + ## written to, if there is no log activity rotation may be delayed. + # logfile_rotation_interval = "0d" + + ## The logfile will be rotated when it becomes larger than the specified + ## size. When set to 0 no size based rotation is performed. + # logfile_rotation_max_size = "0MB" + + ## Maximum number of rotated archives to keep, any older logs are deleted. + ## If set to -1, no archives are removed. + # logfile_rotation_max_archives = 5 + + ## Override default hostname, if empty use os.Hostname() + hostname = "" + ## If set to true, do no set the "host" tag in the telegraf agent. 
+ omit_hostname = false + + +############################################################################### +# OUTPUT PLUGINS # +############################################################################### + + +# Configuration for sending metrics to InfluxDB +[[outputs.influxdb]] + ## The full HTTP or UDP URL for your InfluxDB instance. + ## + ## Multiple URLs can be specified for a single cluster, only ONE of the + ## urls will be written to each interval. + # urls = ["unix:///var/run/influxdb.sock"] + # urls = ["udp://127.0.0.1:8089"] + urls = ["http://mnml-prod-elk:8086"] + + ## The target database for metrics; will be created as needed. + ## For UDP url endpoint database needs to be configured on server side. + # database = "telegraf" + + ## The value of this tag will be used to determine the database. If this + ## tag is not set the 'database' option is used as the default. + # database_tag = "" + + ## If true, the database tag will not be added to the metric. + # exclude_database_tag = false + + ## If true, no CREATE DATABASE queries will be sent. Set to true when using + ## Telegraf with a user without permissions to create databases or when the + ## database already exists. + # skip_database_creation = false + + ## Name of existing retention policy to write to. Empty string writes to + ## the default retention policy. Only takes effect when using HTTP. + # retention_policy = "" + + ## Write consistency (clusters only), can be: "any", "one", "quorum", "all". + ## Only takes effect when using HTTP. + # write_consistency = "any" + + ## Timeout for HTTP messages. + # timeout = "5s" + + ## HTTP Basic Auth + # username = "telegraf" + # password = "metricsmetricsmetricsmetrics" + + ## HTTP User-Agent + # user_agent = "telegraf" + + ## UDP payload size is the maximum packet size to send. + # udp_payload = "512B" + + ## Optional TLS Config for use on HTTP connections. + # tls_ca = "/etc/telegraf/ca.pem" + # tls_cert = "/etc/telegraf/cert.pem" + # tls_key = "/etc/telegraf/key.pem" + ## Use TLS but skip chain & host verification + # insecure_skip_verify = false + + ## HTTP Proxy override, if unset values the standard proxy environment + ## variables are consulted to determine which proxy, if any, should be used. + # http_proxy = "http://corporate.proxy:3128" + + ## Additional HTTP headers + # http_headers = {"X-Special-Header" = "Special-Value"} + + ## HTTP Content-Encoding for write request body, can be set to "gzip" to + ## compress body or "identity" to apply no encoding. + # content_encoding = "identity" + + ## When true, Telegraf will output unsigned integers as unsigned values, + ## i.e.: "42u". You will need a version of InfluxDB supporting unsigned + ## integer values. Enabling this option will result in field type errors if + ## existing data has been written. + # influx_uint_support = false + +# # Configuration for sending metrics to InfluxDB +# [[outputs.influxdb_v2]] +# ## The URLs of the InfluxDB cluster nodes. +# ## +# ## Multiple URLs can be specified for a single cluster, only ONE of the +# ## urls will be written to each interval. +# urls = ["http://127.0.0.1:9999"] +# +# ## Token for authentication. +# token = "" +# +# ## Organization is the name of the organization you wish to write to; must exist. +# organization = "" +# +# ## Destination bucket to write into. +# bucket = "" +# +# ## The value of this tag will be used to determine the bucket. If this +# ## tag is not set the 'bucket' option is used as the default. 
+# # bucket_tag = "" +# +# ## If true, the bucket tag will not be added to the metric. +# # exclude_bucket_tag = false +# +# ## Timeout for HTTP messages. +# # timeout = "5s" +# +# ## Additional HTTP headers +# # http_headers = {"X-Special-Header" = "Special-Value"} +# +# ## HTTP Proxy override, if unset values the standard proxy environment +# ## variables are consulted to determine which proxy, if any, should be used. +# # http_proxy = "http://corporate.proxy:3128" +# +# ## HTTP User-Agent +# # user_agent = "telegraf" +# +# ## Content-Encoding for write request body, can be set to "gzip" to +# ## compress body or "identity" to apply no encoding. +# # content_encoding = "gzip" +# +# ## Enable or disable uint support for writing uints influxdb 2.0. +# # influx_uint_support = false +# +# ## Optional TLS Config for use on HTTP connections. +# # tls_ca = "/etc/telegraf/ca.pem" +# # tls_cert = "/etc/telegraf/cert.pem" +# # tls_key = "/etc/telegraf/key.pem" +# ## Use TLS but skip chain & host verification +# # insecure_skip_verify = false + +############################################################################### +# INPUT PLUGINS # +############################################################################### + +# Read metrics about cpu usage +[[inputs.cpu]] + ## Whether to report per-cpu stats or not + percpu = true + ## Whether to report total system cpu stats or not + totalcpu = true + ## If true, collect raw CPU time metrics. + collect_cpu_time = false + ## If true, compute and report the sum of all non-idle CPU states. + report_active = false + + +# Read metrics about disk usage by mount point +[[inputs.disk]] + ## By default stats will be gathered for all mount points. + ## Set mount_points will restrict the stats to only the specified mount points. + # mount_points = ["/"] + + ## Ignore mount points by filesystem type. + ignore_fs = ["tmpfs", "devtmpfs", "devfs", "iso9660", "overlay", "aufs", "squashfs"] + + +# Read metrics about disk IO by device +[[inputs.diskio]] + ## By default, telegraf will gather stats for all devices including + ## disk partitions. + ## Setting devices will restrict the stats to the specified devices. + # devices = ["sda", "sdb", "vd*"] + ## Uncomment the following line if you need disk serial numbers. + # skip_serial_number = false + # + ## On systems which support it, device metadata can be added in the form of + ## tags. + ## Currently only Linux is supported via udev properties. You can view + ## available properties for a device by running: + ## 'udevadm info -q property -n /dev/sda' + ## Note: Most, but not all, udev properties can be accessed this way. Properties + ## that are currently inaccessible include DEVTYPE, DEVNAME, and DEVPATH. + # device_tags = ["ID_FS_TYPE", "ID_FS_USAGE"] + # + ## Using the same metadata source as device_tags, you can also customize the + ## name of the device via templates. + ## The 'name_templates' parameter is a list of templates to try and apply to + ## the device. The template may contain variables in the form of '$PROPERTY' or + ## '${PROPERTY}'. The first template which does not contain any variables not + ## present for the device is used as the device name tag. + ## The typical use case is for LVM volumes, to get the VG/LV name instead of + ## the near-meaningless DM-0 name. 
+ # name_templates = ["$ID_FS_LABEL","$DM_VG_NAME/$DM_LV_NAME"] + + +# Get kernel statistics from /proc/stat +[[inputs.kernel]] + # no configuration + + +# Read metrics about memory usage +[[inputs.mem]] + # no configuration + + +# Get the number of processes and group them by status +[[inputs.processes]] + # no configuration + + +# Read metrics about swap memory usage +[[inputs.swap]] + # no configuration + + +# Read metrics about system load & uptime +[[inputs.system]] + ## Uncomment to remove deprecated metrics. + # fielddrop = ["uptime_format"] + +[[inputs.tail]] + files = ["/var/log/mnml/mnml.log"] + name_override = "mnml_log" + data_format = "json" + json_time_key = "time" + json_time_format = "2006-01-02T15:04:05.999999999Z07:00" + json_string_fields = ["level", "module", "msg"] + diff --git a/ops/.gitignore b/ops/.gitignore index 2a51843a..30d6c1cb 100755 --- a/ops/.gitignore +++ b/ops/.gitignore @@ -2,3 +2,5 @@ node_modules/ package-lock.json molecules/ bulk.sdf + +knexfile.js diff --git a/ops/knexfile.js b/ops/knexfile.SAMPLE.js old mode 100755 new mode 100644 similarity index 100% rename from ops/knexfile.js rename to ops/knexfile.SAMPLE.js diff --git a/ops/migrations/20180913000513_create_accounts.js b/ops/migrations/20180913000513_create_accounts.js index 06961ad3..17eddca3 100755 --- a/ops/migrations/20180913000513_create_accounts.js +++ b/ops/migrations/20180913000513_create_accounts.js @@ -6,7 +6,10 @@ exports.up = async knex => { table.string('name', 42).notNullable().unique(); table.string('password').notNullable(); - table.string('token', 64).notNullable(); + table.string('token', 64) + .notNullable() + .index(); + table.timestamp('token_expiry').notNullable(); table.bigInteger('balance') @@ -18,7 +21,6 @@ exports.up = async knex => { .notNullable(); table.index('name'); - table.index('id'); }); await knex.schema.raw(` diff --git a/ops/migrations/20190825172701_email.js b/ops/migrations/20190825172701_email.js new file mode 100644 index 00000000..3c69f92b --- /dev/null +++ b/ops/migrations/20190825172701_email.js @@ -0,0 +1,43 @@ +exports.up = async knex => { + await knex.schema.createTable('emails', table => { + table.timestamps(true, true); + + table.uuid('id') + .primary() + .index(); + + table.uuid('account') + .notNullable() + .index(); + + table.foreign('account') + .references('id') + .inTable('accounts') + .onDelete('CASCADE'); + + table.string('email', 128) + .unique() + .notNullable() + .index(); + + table.string('confirm_token', 64) + .notNullable() + .index(); + + table.string('recover_token', 64) + .notNullable() + .index(); + + table.timestamp('recover_token_expiry') + .notNullable() + .defaultTo(knex.fn.now()); + + table.bool('confirmed') + .notNullable() + .defaultTo(false); + }); + + return true; +}; + +exports.down = async () => {}; \ No newline at end of file diff --git a/ops/package.json b/ops/package.json index c56c6e06..2b7cd67a 100755 --- a/ops/package.json +++ b/ops/package.json @@ -1,6 +1,6 @@ { "name": "mnml-ops", - "version": "0.2.0", + "version": "0.3.0", "description": "", "main": "index.js", "scripts": { diff --git a/server/Cargo.toml b/server/Cargo.toml index 392a93cd..f66c0f87 100644 --- a/server/Cargo.toml +++ b/server/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "mnml" -version = "0.2.0" +version = "0.3.0" authors = ["ntr "] [dependencies] @@ -23,10 +23,11 @@ failure = "0.1" dotenv = "0.9.0" log = "0.4" -fern = "0.5" +fern = { version = "0.5", features = ["colored"] } iron = "0.6" bodyparser = "0.8" +urlencoded = "0.6" persistent = "0.4" 
router = "0.6" mount = "0.4" @@ -34,4 +35,7 @@ cookie = "0.12" crossbeam-channel = "0.3" ws = "0.8" +lettre = "0.9" +lettre_email = "0.9" + stripe-rust = { version = "0.10.4", features = ["webhooks"] } diff --git a/server/src/account.rs b/server/src/account.rs index 562cae11..a33b6b02 100644 --- a/server/src/account.rs +++ b/server/src/account.rs @@ -78,7 +78,7 @@ pub fn select_name(db: &Db, name: &String) -> Result { Account::try_from(row) } -pub fn from_token(db: &Db, token: String) -> Result { +pub fn from_token(db: &Db, token: &String) -> Result { let query = " SELECT id, name, subscribed, balance FROM accounts @@ -87,7 +87,7 @@ pub fn from_token(db: &Db, token: String) -> Result { "; let result = db - .query(query, &[&token])?; + .query(query, &[token])?; let row = result.iter().next() .ok_or(err_msg("invalid token"))?; diff --git a/server/src/construct.rs b/server/src/construct.rs index 5abb3614..652acb1b 100644 --- a/server/src/construct.rs +++ b/server/src/construct.rs @@ -873,7 +873,7 @@ pub fn construct_spawn(tx: &mut Transaction, account: Uuid, name: String, team: img::molecular_write(construct.img)?; - info!("spawned construct account={:} construct={:?}", account, construct); + info!("spawned construct account={:} name={:?}", account, construct.name); return Ok(construct); } diff --git a/server/src/events.rs b/server/src/events.rs index 95cf18e6..eb5e12d5 100644 --- a/server/src/events.rs +++ b/server/src/events.rs @@ -15,6 +15,7 @@ use instance; use pg::{Db, PgPool}; use rpc::RpcMessage; use warden::{GameEvent}; +use mail::Mail; pub type EventsTx = Sender; type Id = usize; @@ -34,6 +35,7 @@ pub struct Events { pub tx: Sender, rx: Receiver, + mail: Sender, warden: Sender, queue: Option, @@ -62,11 +64,12 @@ struct WsClient { } impl Events { - pub fn new(tx: Sender, rx: Receiver, warden: Sender) -> Events { + pub fn new(tx: Sender, rx: Receiver, warden: Sender, mail: Sender) -> Events { Events { tx, rx, warden, + mail, queue: None, clients: HashMap::new(), } @@ -150,7 +153,7 @@ impl Events { }, Event::Push(id, msg) => { - info!("push id={:?} msg={:?}", id, msg); + info!("push id={:?}", id); let mut subs = 0; let mut dead = vec![]; diff --git a/server/src/http.rs b/server/src/http.rs index cc6f3d0a..592a06d0 100644 --- a/server/src/http.rs +++ b/server/src/http.rs @@ -6,14 +6,20 @@ use iron::prelude::*; use iron::status; use iron::typemap::Key; use iron::mime::Mime; +use iron::modifiers::Redirect; +use iron::Url; use iron::{typemap, BeforeMiddleware,AfterMiddleware}; -use persistent::Read; +use urlencoded::UrlEncodedQuery; +use persistent::{Read, Write}; use router::Router; use mount::{Mount}; use serde::{Serialize, Deserialize}; +use lettre::{SendableEmail, SmtpClient, SmtpTransport, Transport}; use acp; use account; +use mail; +use mail::Mail; use pg::PgPool; use payments::{stripe}; @@ -51,19 +57,22 @@ pub enum MnmlHttpError { } impl From for MnmlHttpError { - fn from(_err: bcrypt::BcryptError) -> Self { + fn from(err: bcrypt::BcryptError) -> Self { + warn!("{:?}", err); MnmlHttpError::ServerError } } impl From for MnmlHttpError { - fn from(_err: postgres::Error) -> Self { + fn from(err: postgres::Error) -> Self { + warn!("{:?}", err); MnmlHttpError::DbError } } impl From for MnmlHttpError { - fn from(_err: r2d2::Error) -> Self { + fn from(err: r2d2::Error) -> Self { + warn!("{:?}", err); MnmlHttpError::DbError } } @@ -133,9 +142,9 @@ impl BeforeMiddleware for AuthMiddleware { // got auth token if cookie.name() == TOKEN_HEADER { - match account::from_token(&db, 
cookie.value().to_string()) { + match account::from_token(&db, &cookie.value().to_string()) { Ok(a) => req.extensions.insert::(a), - Err(_) => return Err(IronError::from(MnmlHttpError::TokenDoesNotMatch)), + Err(_) => return Err(MnmlHttpError::TokenDoesNotMatch.into()), }; } } @@ -195,7 +204,7 @@ fn register(req: &mut Request) -> IronResult { let state = req.get::>().unwrap(); let params = match req.get::>() { Ok(Some(b)) => b, - _ => return Err(IronError::from(MnmlHttpError::BadRequest)), + _ => return Err(MnmlHttpError::BadRequest.into()), }; let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; @@ -208,7 +217,7 @@ fn register(req: &mut Request) -> IronResult { }, Err(e) => { warn!("{:?}", e); - Err(IronError::from(e)) + Err(e.into()) } } } @@ -223,7 +232,7 @@ fn login(req: &mut Request) -> IronResult { let state = req.get::>().unwrap(); let params = match req.get::>() { Ok(Some(b)) => b, - _ => return Err(IronError::from(MnmlHttpError::BadRequest)), + _ => return Err(MnmlHttpError::BadRequest.into()), }; let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; @@ -237,7 +246,7 @@ fn login(req: &mut Request) -> IronResult { }, Err(e) => { warn!("{:?}", e); - Err(IronError::from(e)) + Err(e.into()) } } } @@ -256,12 +265,95 @@ fn logout(req: &mut Request) -> IronResult { let mut res = json_response(status::Ok, Json::Message("logged out".to_string())); res.headers.set(SetCookie(vec![AUTH_CLEAR.to_string()])); Ok(res) - }, - None => Err(IronError::from(MnmlHttpError::Unauthorized)), + None => Err(MnmlHttpError::Unauthorized.into()), } } +fn recover_set(req: &mut Request) -> IronResult { + let state = req.get::>().unwrap(); + let params = match req.get::>() { + Ok(Some(b)) => b, + _ => return Err(MnmlHttpError::BadRequest.into()), + }; + + let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; + let mut tx = db.transaction().or(Err(MnmlHttpError::DbError))?; + + let user_email = match mail::select(&db, ¶ms.email) { + Ok(e) => match e.confirmed { + true => e, + false => return Ok(json_response(status::NotFound, + Json::Error("your email is not confirmed.\nplease contact support at humans@mnml.gg".to_string()))), + }, + Err(_e) => return Ok(json_response(status::NotFound, + Json::Error("email not registered.\nplease contact support at humans@mnml.gg".to_string()))), + }; + + let account = account::select(&db, user_email.account) + .or(Err(MnmlHttpError::NotFound))?; + + let token = mail::set_recovery(&mut tx, &user_email.email) + .or(Err(MnmlHttpError::ServerError))?; + + let app_mailer = req.get::>().unwrap(); + let mut lock = app_mailer.lock().unwrap(); + let message = Mail::Recover { email: user_email.email.clone(), name: account.name.clone(), token }; + + let send = match mail::send_mail(&mut lock.mailer, message) { + Ok(send) => send, + Err(e) => { + warn!("{:?}", e); + return Err(MnmlHttpError::ServerError.into()); + } + }; + + tx.commit().or(Err(MnmlHttpError::ServerError))?; + + info!("recovery email sent send={:?} account={:?} email={:?}", send, account, user_email.email); + Ok(json_response(status::Ok, Json::Message("recovery email sent. 
check your mailbox for access".to_string()))) +} + +fn recover(req: &mut Request) -> IronResult { + let state = req.get::>().unwrap(); + let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; + let mut tx = db.transaction().or(Err(MnmlHttpError::DbError))?; + let token = match req.get_ref::() { + Ok(ref hashmap) => { + match hashmap.get("recover_token") { + Some(t) => &t[0], + None => return Err(MnmlHttpError::BadRequest.into()), + } + }, + Err(_) => return Err(MnmlHttpError::BadRequest.into()), + }; + + let user_email = match mail::get_recovery(&mut tx, &token.to_string()) { + Ok(a) => a, + Err(_) => return Err(MnmlHttpError::Unauthorized.into()), + }; + + let token = account::new_token(&mut tx, user_email.account) + .or(Err(MnmlHttpError::ServerError))?; + + let account = account::from_token(&db, &token) + .or(Err(MnmlHttpError::ServerError))?; + + let v = Cookie::build(TOKEN_HEADER, token) + .http_only(true) + .same_site(SameSite::Strict) + .path("/") + .max_age(Duration::weeks(1)) // 1 week aligns with db set + .finish(); + + tx.commit().or(Err(MnmlHttpError::ServerError))?; + let mut res = Response::with((status::SeeOther, Redirect(Url::parse("https://mnml.gg").unwrap()))); + res.headers.set(SetCookie(vec![v.to_string()])); + + info!("recovered account account={:?}", account); + Ok(res) +} + #[derive(Debug,Clone,Deserialize)] struct SetPassword { current: String, @@ -272,7 +364,7 @@ fn set_password(req: &mut Request) -> IronResult { let state = req.get::>().unwrap(); let params = match req.get::>() { Ok(Some(b)) => b, - _ => return Err(IronError::from(MnmlHttpError::BadRequest)), + _ => return Err(MnmlHttpError::BadRequest.into()), }; match req.extensions.get::() { @@ -286,7 +378,87 @@ fn set_password(req: &mut Request) -> IronResult { Ok(token_res(token)) }, - None => Err(IronError::from(MnmlHttpError::Unauthorized)), + None => Err(MnmlHttpError::Unauthorized.into()), + } +} + +#[derive(Debug,Clone,Deserialize)] +struct EmailPost { + email: String, +} + +fn email_set(req: &mut Request) -> IronResult { + let state = req.get::>().unwrap(); + let params = match req.get::>() { + Ok(Some(b)) => b, + _ => return Err(MnmlHttpError::BadRequest.into()), + }; + + let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; + let mut tx = db.transaction().or(Err(MnmlHttpError::DbError))?; + + let (email, account, token) = match req.extensions.get::() { + Some(a) => { + let (_id, token) = match mail::set(&mut tx, a.id, ¶ms.email) { + Ok(res) => res, + Err(e) => { + warn!("{:?}", e); + return Err(MnmlHttpError::ServerError.into()); + }, + }; + + (params.email.clone(), a.clone(), token) + }, + None => return Err(MnmlHttpError::Unauthorized.into()), + }; + + let app_mailer = req.get::>().unwrap(); + let mut lock = app_mailer.lock().unwrap(); + let message = Mail::Confirm { email: email.clone(), name: account.name.clone(), token }; + + let send = match mail::send_mail(&mut lock.mailer, message) { + Ok(send) => send, + Err(e) => { + warn!("{:?}", e); + return Err(MnmlHttpError::ServerError.into()); + } + }; + + tx.commit().or(Err(MnmlHttpError::ServerError))?; + + info!("confirmation email sent send={:?} account={:?} email={:?}", send, account, email); + Ok(json_response(status::Ok, Json::Message("email set. 
confirmation required".to_string()))) +} + +fn email_confirm(req: &mut Request) -> IronResult { + let state = req.get::>().unwrap(); + + let account = match req.extensions.get::() { + Some(a) => a.clone(), + None => return Err(MnmlHttpError::Unauthorized.into()), + }; + + match req.get_ref::() { + Ok(ref hashmap) => { + let db = state.pool.get().or(Err(MnmlHttpError::DbError))?; + let mut tx = db.transaction().or(Err(MnmlHttpError::DbError))?; + + let token = match hashmap.get("confirm_token") { + Some(t) => &t[0], + None => return Err(MnmlHttpError::BadRequest.into()), + }; + + let confirmation = match mail::confirm_email(&mut tx, &account, token.to_string()) { + Ok(c) => c, + Err(_) => return Err(MnmlHttpError::NotFound.into()) + }; + + info!("email confirmed email={:?} account={:?}", confirmation.0, account); + + tx.commit().or(Err(MnmlHttpError::ServerError))?; + Ok(Response::with((status::Found, Redirect(Url::parse("https://mnml.gg").unwrap())))) + }, + Err(_) => Err(MnmlHttpError::BadRequest.into()), } } @@ -294,10 +466,14 @@ const MAX_BODY_LENGTH: usize = 1024 * 1024 * 10; pub struct State { pub pool: PgPool, - // pub events: Events, +} + +pub struct Mailer { + pub mailer: SmtpTransport, } impl Key for State { type Value = State; } +impl Key for Mailer { type Value = Mailer; } fn account_mount() -> Router { let mut router = Router::new(); @@ -306,7 +482,12 @@ fn account_mount() -> Router { router.post("logout", logout, "logout"); router.post("register", register, "register"); router.post("password", set_password, "set_password"); - router.post("email", logout, "email"); + router.post("email", email_set, "email_set"); + router.post("recover", recover_set, "recover_set"); + + // it is sent in an email... + router.get("email/confirm", email_confirm, "email_confirm"); + router.get("recover", recover, "recover"); router } @@ -318,7 +499,7 @@ fn payment_mount() -> Router { router } -pub fn start(pool: PgPool) { +pub fn start(pool: PgPool, mailer: SmtpTransport) { let mut mounts = Mount::new(); mounts.mount("/api/account/", account_mount()); @@ -327,6 +508,7 @@ pub fn start(pool: PgPool) { let mut chain = Chain::new(mounts); chain.link(Read::::both(State { pool })); + chain.link(Write::::both(Mailer { mailer })); chain.link_before(Read::::one(MAX_BODY_LENGTH)); chain.link_before(AuthMiddleware); chain.link_after(ErrorHandler); diff --git a/server/src/mail.rs b/server/src/mail.rs new file mode 100644 index 00000000..0fdffc7b --- /dev/null +++ b/server/src/mail.rs @@ -0,0 +1,301 @@ +use std::env; + +use uuid::Uuid; +use rand::{thread_rng, Rng}; +use rand::distributions::Alphanumeric; +use std::iter; +use postgres::transaction::Transaction; + +use failure::Error; +use failure::{err_msg, format_err}; + +use crossbeam_channel::Receiver; +use lettre::smtp::authentication::{Credentials, Mechanism}; +use lettre::smtp::ConnectionReuseParameters; +use lettre::smtp::error::Error as MailError; +use lettre::smtp::extension::ClientId; +use lettre::smtp::response::Response; +use lettre::{SendableEmail, SmtpClient, SmtpTransport, Transport}; +use lettre_email::Email as LettreEmail; + +use account::Account; +use pg::Db; + +#[derive(Debug,Clone,Serialize)] +pub struct Email { + pub id: Uuid, + pub email: String, + pub account: Uuid, + pub confirmed: bool, +} + +#[derive(Debug)] +pub enum Mail { + Recover { email: String, name: String, token: String }, + Confirm { email: String, name: String, token: String }, +} + +// create link that will set a token +// put msg saying pls reset your password +// redirect 
to main page cause cbf + +fn recover(email: &String, name: &String, token: &String) -> SendableEmail { + let body = format!("{:}, +the link below will recover your account. +please change your password immediately in the account page. +this link will expire in 48 hours or once used. + +http://mnml.gg/api/account/recover?recover_token={:} + +glhf +--mnml", name, token); + + LettreEmail::builder() + .from("machines@mnml.gg") + .to(email.clone()) + .subject("account recovery") + .text(body) + .build() + .unwrap() + .into() +} + +fn confirm(email: &String, name: &String, token: &String) -> SendableEmail { + let confirm_body = format!("{:}, +please click the link below to confirm your email +http://mnml.gg/api/account/email/confirm?confirm_token={:} + +glhf +--mnml", name, token); + + LettreEmail::builder() + .from("machines@mnml.gg") + .to(email.clone()) + .subject("email confirmation") + .text(confirm_body) + .build() + .unwrap() + .into() +} + +pub fn send_mail(mailer: &mut SmtpTransport, mail: Mail) -> Result { + let msg = match mail { + Mail::Recover { email, name, token } => recover(&email, &name, &token), + Mail::Confirm { email, name, token } => confirm(&email, &name, &token), + }; + + mailer.send(msg) +} + +pub fn confirm_email(tx: &mut Transaction, account: &Account, confirm_token: String) -> Result<(String, Uuid), Error> { + let query = " + UPDATE emails + SET confirmed = true, updated_at = now() + WHERE confirm_token = $1 + AND account = $2 + RETURNING id, email, account + "; + + let result = tx + .query(query, &[&confirm_token, &account.id])?; + + let row = result.iter().next() + .ok_or(format_err!("confirm_token not found {:?}", confirm_token))?; + + let _id: Uuid = row.get(0); + let email: String = row.get(1); + let account: Uuid = row.get(2); + + return Ok((email, account)); +} + +pub fn select(db: &Db, email: &String) -> Result { + let query = " + SELECT id, email, account, confirmed + FROM emails + WHERE email = $1; + "; + + let result = db + .query(query, &[&email])?; + + let row = result.iter().next() + .ok_or(err_msg("email found"))?; + + let id: Uuid = row.get(0); + let email: String = row.get(1); + let account: Uuid = row.get(2); + let confirmed: bool = row.get(3); + + return Ok(Email { id, email, account, confirmed }); +} + +pub fn select_account(db: &Db, account: Uuid) -> Result, Error> { + let query = " + SELECT id, email, account, confirmed + FROM emails + WHERE account = $1; + "; + + let result = db + .query(query, &[&account])?; + + let row = match result.iter().next() { + Some(r) => r, + None => return Ok(None), + }; + + let id: Uuid = row.get(0); + let email: String = row.get(1); + let account: Uuid = row.get(2); + let confirmed: bool = row.get(3); + + return Ok(Some(Email { id, email, account, confirmed })); +} + +pub fn set_recovery(tx: &mut Transaction, email: &String) -> Result { + let mut rng = thread_rng(); + let recover_token: String = iter::repeat(()) + .map(|()| rng.sample(Alphanumeric)) + .take(64) + .collect(); + + let query = " + UPDATE emails + SET recover_token = $1, recover_token_expiry = now() + interval '2 days' + WHERE email = $2 + AND confirmed = true + RETURNING id, email, account + "; + + let result = tx + .query(query, &[&recover_token, &email])?; + + let row = result.iter().next() + .ok_or(format_err!("no confirmed email found {:?}", email))?; + + let _id: Uuid = row.get(0); + let _email: String = row.get(1); + let _account: Uuid = row.get(2); + + return Ok(recover_token); +} + +pub fn get_recovery(tx: &mut Transaction, recover_token: 
&String) -> Result { + // set a new token when recovering to prevent multiple access + let mut rng = thread_rng(); + let new_token: String = iter::repeat(()) + .map(|()| rng.sample(Alphanumeric)) + .take(64) + .collect(); + + let query = " + UPDATE emails + SET recover_token = $1, recover_token_expiry = now() + WHERE recover_token = $2 + AND recover_token_expiry > now() + AND confirmed = true + RETURNING id, email, account, confirmed; + "; + + let result = tx + .query(query, &[&new_token, &recover_token])?; + + let row = result.iter().next() + .ok_or(err_msg("no confirmed email found"))?; + + let id: Uuid = row.get(0); + let email: String = row.get(1); + let account: Uuid = row.get(2); + let confirmed: bool = row.get(3); + + return Ok(Email { id, email, account, confirmed }); +} + +pub fn set(tx: &mut Transaction, account: Uuid, email: &String) -> Result<(Uuid, String), Error> { + let id = Uuid::new_v4(); + + let mut rng = thread_rng(); + let confirm_token: String = iter::repeat(()) + .map(|()| rng.sample(Alphanumeric)) + .take(64) + .collect(); + + let recover_token: String = iter::repeat(()) + .map(|()| rng.sample(Alphanumeric)) + .take(64) + .collect(); + + let insert_query = " + INSERT INTO emails (id, account, email, confirm_token, confirmed, recover_token) + VALUES ($1, $2, $3, $4, false, $5) + RETURNING id; + "; + + let update_query = " + UPDATE emails + SET email = $1, confirm_token = $2, confirmed = false, recover_token = $3 + WHERE account = $4 + RETURNING id; + "; + + let result = match tx.query(insert_query, &[&id, &account, &email, &confirm_token, &recover_token]) { + Ok(r) => r, + // email update probably + Err(_) => { + match tx.query(update_query, &[&email, &confirm_token, &recover_token, &account]) { + Ok(r) => r, + Err(e) => { + warn!("{:?}", e); + return Err(err_msg("no email set")); + }, + } + } + }; + + match result.iter().next() { + Some(row) => row, + None => return Err(err_msg("no email set")), + }; + + return Ok((id, confirm_token)); +} + +pub fn listen(rx: Receiver) -> SmtpTransport { + let sender = env::var("MAIL_ADDRESS") + .expect("MAIL_ADDRESS must be set"); + + let password = env::var("MAIL_PASSWORD") + .expect("MAIL_PASSWORD must be set"); + + let domain = env::var("MAIL_DOMAIN") + .expect("MAIL_DOMAIN must be set"); + + let mut mailer = SmtpClient::new_simple("smtp.gmail.com").unwrap() + .hello_name(ClientId::Domain(domain)) + .credentials(Credentials::new(sender, password)) + .smtp_utf8(true) + .authentication_mechanism(Mechanism::Plain) + .connection_reuse(ConnectionReuseParameters::ReuseUnlimited) + .transport(); + + info!("mail connected"); + + // loop { + // match rx.recv() { + // Ok(m) => match send_mail(&mut mailer, m) { + // Ok(r) => info!("{:?}", r), + // Err(e) => warn!("{:?}", e), + // }, + // Err(e) => { + // error!("{:?}", e); + // panic!("mail thread cannot continue"); + // }, + // }; + // } + + // Explicitly close the SMTP transaction as we enabled connection reuse + // mailer.close(); + return mailer; +} + diff --git a/server/src/main.rs b/server/src/main.rs index 44102168..b452f137 100644 --- a/server/src/main.rs +++ b/server/src/main.rs @@ -21,11 +21,15 @@ extern crate stripe; extern crate iron; extern crate bodyparser; +extern crate urlencoded; extern crate persistent; extern crate router; extern crate mount; extern crate cookie; +extern crate lettre; +extern crate lettre_email; + extern crate ws; extern crate crossbeam_channel; @@ -37,6 +41,7 @@ mod game; mod instance; mod item; mod img; +mod mail; mod mob; mod mtx; mod names; @@ -54,33 
+59,70 @@ mod warden; use std::thread::{spawn}; use std::path::{Path}; - +use fern::colors::{Color, ColoredLevelConfig}; use crossbeam_channel::{unbounded}; +#[derive(Serialize)] +struct JsonLog { + time: String, + module: String, + level: String, + msg: String, +} + fn setup_logger() -> Result<(), fern::InitError> { - fern::Dispatch::new() - .format(|out, message, record| { + let colors_line = ColoredLevelConfig::new() + .error(Color::Red) + .warn(Color::Yellow) + .info(Color::BrightWhite) + .debug(Color::BrightWhite) + .trace(Color::BrightBlack); + + let colors_level = colors_line.clone().info(Color::Green); + let term = fern::Dispatch::new() + .format(move |out, message, record| { out.finish(format_args!( - "{}[{}][{}] {}", - chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), - record.target(), - record.level(), - message + "{color_line}{date} {target} {level}{color_line} {message}\x1B[0m", + color_line = format_args!("\x1B[{}m", colors_line.get_color(&record.level()).to_fg_str()), + date = chrono::Local::now().format("%Y-%m-%d %H:%M:%S"), + target = record.target(), + level = colors_level.color(record.level()), + message = message, + )); + }) + .chain(std::io::stdout()); + + let json = fern::Dispatch::new() + .format(|out, message, record| { + let json = JsonLog { + time: chrono::Local::now().to_rfc3339(), + module: record.target().to_string(), + level: record.level().to_string(), + msg: message.to_string() + }; + + out.finish(format_args!( + "{}", + serde_json::to_string(&json).unwrap(), )) }) + .chain(fern::log_file("/var/log/mnml/mnml.log")?); + + fern::Dispatch::new() .level_for("postgres", log::LevelFilter::Info) .level_for("ws", log::LevelFilter::Warn) .level_for("iron", log::LevelFilter::Info) .level(log::LevelFilter::Info) - .chain(std::io::stdout()) - .chain(fern::log_file("/var/log/mnml/mnml.log")?) 
+ .chain(term) + .chain(json) .apply()?; + Ok(()) } fn main() { - dotenv::from_path(Path::new("/etc/mnml/server.conf")).ok(); setup_logger().unwrap(); + dotenv::from_path(Path::new("/etc/mnml/gs.conf")).ok(); let pool = pg::create_pool(); let http_pool = pool.clone(); @@ -93,14 +135,18 @@ fn main() { let events_warden_tx = warden_tx.clone(); let warden_tick_tx = warden_tx.clone(); + let (mail_tx, mail_rx) = unbounded(); + let http_mail_tx = mail_tx.clone(); + // create a clone of the tx so ws handler can tell events // about connection status - let events = events::Events::new(events_tx, events_rx, events_warden_tx); + let events = events::Events::new(events_tx, events_rx, events_warden_tx, mail_tx); let warden = warden::Warden::new(warden_tx, warden_rx, events.tx.clone(), pool.clone()); let pg_pool = pool.clone(); + let mailer = mail::listen(mail_rx); - spawn(move || http::start(http_pool)); + spawn(move || http::start(http_pool, mailer)); spawn(move || warden.listen()); spawn(move || warden::upkeep_tick(warden_tick_tx)); spawn(move || pg::listen(pg_pool, pg_events_tx)); diff --git a/server/src/rpc.rs b/server/src/rpc.rs index 7bb2064e..eb97be9a 100644 --- a/server/src/rpc.rs +++ b/server/src/rpc.rs @@ -22,13 +22,15 @@ use game::{Game, game_state, game_skill, game_ready}; use instance::{Instance, instance_state, instance_practice, instance_ready}; use item::{Item, ItemInfoCtr, item_info}; use mtx; +use mail; +use mail::Email; use pg::{Db}; use pg::{PgPool}; use skill::{Skill, dev_resolve, Resolutions}; use vbox::{vbox_accept, vbox_apply, vbox_discard, vbox_combine, vbox_reclaim, vbox_unequip}; use http::{AUTH_CLEAR, TOKEN_HEADER}; -#[derive(Debug,Clone,Serialize,Deserialize)] +#[derive(Debug,Clone,Serialize)] pub enum RpcMessage { AccountState(Account), AccountConstructs(Vec), @@ -36,6 +38,7 @@ pub enum RpcMessage { AccountInstances(Vec), AccountShop(mtx::Shop), ConstructSpawn(Construct), + EmailState(Email), GameState(Game), ItemInfo(ItemInfoCtr), @@ -225,6 +228,15 @@ impl Handler for Connection { let db = self.pool.get().unwrap(); let mut tx = db.transaction().unwrap(); + // email state + match mail::select_account(&db, a.id).unwrap() { + Some(e) => { + self.ws.send(RpcMessage::EmailState(e.clone())).unwrap(); + self.events.send(Event::Subscribe(self.id, e.id)).unwrap(); + }, + None => (), + }; + // send account constructs let account_constructs = account::constructs(&mut tx, a).unwrap(); self.ws.send(RpcMessage::AccountConstructs(account_constructs)).unwrap(); @@ -308,7 +320,7 @@ impl Handler for Connection { // got auth token if cookie.name() == TOKEN_HEADER { let db = self.pool.get().unwrap(); - match account::from_token(&db, cookie.value().to_string()) { + match account::from_token(&db, &cookie.value().to_string()) { Ok(a) => self.account = Some(a), Err(_) => return unauth(), }
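
A note on the mail plumbing above: main.rs now passes a mail Sender into Events and clones an http_mail_tx, but nothing drains that channel yet. mail::listen only builds the SmtpTransport and leaves its receive loop commented out, and the HTTP handlers instead call send_mail through the shared Write<Mailer> middleware. Below is a cleaned-up sketch of that commented-out worker, assuming the Mail enum and send_mail from mail.rs, in case the channel-based design is picked back up later; it describes the stubbed approach, not what this diff ships.

// Hypothetical worker thread for mail.rs, adapted from the loop left
// commented out in mail::listen. Not wired up in this diff: handlers
// currently call send_mail directly through the shared Mailer.
use crossbeam_channel::Receiver;
use lettre::SmtpTransport;

pub fn mail_worker(rx: Receiver<Mail>, mut mailer: SmtpTransport) {
    loop {
        match rx.recv() {
            // a handler queued a Mail::Confirm or Mail::Recover
            Ok(m) => match send_mail(&mut mailer, m) {
                Ok(r) => info!("mail sent {:?}", r),
                Err(e) => warn!("mail send failed {:?}", e),
            },
            // all senders dropped; shut the worker down
            Err(e) => {
                error!("mail channel closed {:?}", e);
                break;
            }
        }
    }
}
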