diff --git a/.github/ISSUE_TEMPLATE/--bug-report.md b/.github/ISSUE_TEMPLATE/--bug-report.md index b0d61217880..417f1eee1f8 100644 --- a/.github/ISSUE_TEMPLATE/--bug-report.md +++ b/.github/ISSUE_TEMPLATE/--bug-report.md @@ -5,6 +5,10 @@ label: bug --- +**If you need support for Nightscout, PLEASE DO NOT FILE A TICKET HERE** +For support, please post a question to the "CGM in The Cloud" group in Facebook +(https://www.facebook.com/groups/cgminthecloud) or visit the WeAreNotWaiting Discord at https://discord.gg/zg7CvCQ + **Describe the bug** A clear and concise description of what the bug is. diff --git a/.github/ISSUE_TEMPLATE/--feature-request--.md b/.github/ISSUE_TEMPLATE/--feature-request--.md index a94a261abf8..293efbc9c1e 100644 --- a/.github/ISSUE_TEMPLATE/--feature-request--.md +++ b/.github/ISSUE_TEMPLATE/--feature-request--.md @@ -4,6 +4,10 @@ about: Suggest an idea for this project --- +**If you need support for Nightscout, PLEASE DO NOT FILE A TICKET HERE** +For support, please post a question to the "CGM in The Cloud" group in Facebook +(https://www.facebook.com/groups/cgminthecloud) or visit the WeAreNotWaiting Discord at https://discord.gg/zg7CvCQ + **Is your feature request related to a problem? Please describe.** A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] diff --git a/.github/ISSUE_TEMPLATE/--individual-troubleshooting-help.md b/.github/ISSUE_TEMPLATE/--individual-troubleshooting-help.md index ff6e27b7682..e4c9b15d63f 100644 --- a/.github/ISSUE_TEMPLATE/--individual-troubleshooting-help.md +++ b/.github/ISSUE_TEMPLATE/--individual-troubleshooting-help.md @@ -6,6 +6,8 @@ about: Getting help with your own individual setup of Nightscout Having issues getting Nightscout up and running? Instead of creating an issue here, please use one of the existing support channels for Nightscout. +The documentation for Nightscout lives at (https://nightscout.github.io) + The main support channel is on Facebook: please join the CGM In The Cloud Facebook group (https://www.facebook.com/groups/cgminthecloud) and start a post there. **Suggestions to include in your post when you are asking for help:** @@ -13,4 +15,4 @@ The main support channel is on Facebook: please join the CGM In The Cloud Facebo 2. Include which step you are on and what the problem is: ("*I deployed on Heroku, but I'm not seeing any BG data.*") 3. If possible, include a link to the version of documentation you are following ("*I'm following the OpenAPS Nightscout setup docs (https://openaps.readthedocs.io/en/latest/docs/While%20You%20Wait%20For%20Gear/nightscout-setup.html#nightscout-setup-with-heroku)*") -Other places you can find support and assistance for Nightscout include Gitter's [nightscout/public](https://gitter.im/nightscout/public) channel. 
+Other places you can find support and assistance for Nightscout include our Discord channel at (https://discord.gg/zg7CvCQ) diff --git a/app.js b/app.js index f46db9c942e..c6528819bb9 100644 --- a/app.js +++ b/app.js @@ -132,8 +132,25 @@ function create (env, ctx) { )); }); + // Allow static resources to be cached for week + var maxAge = 7 * 24 * 60 * 60 * 1000; + + if (process.env.NODE_ENV === 'development') { + maxAge = 1; + console.log('Development environment detected, setting static file cache age to 1 second'); + } + + var staticFiles = express.static(env.static_files, { + maxAge + }); + + // serve the static content + app.use(staticFiles); + if (ctx.bootErrors && ctx.bootErrors.length > 0) { - app.get('*', require('./lib/server/booterror')(ctx)); + const bootErrorView = require('./lib/server/booterror')(env, ctx); + bootErrorView.setLocals(app.locals); + app.get('*', bootErrorView); return app; } @@ -256,36 +273,6 @@ function create (env, ctx) { res.sendFile(__dirname + '/swagger.yaml'); }); - /* // FOR DEBUGGING MEMORY LEEAKS - if (env.settings.isEnabled('dumps')) { - var heapdump = require('heapdump'); - app.get('/api/v2/dumps/start', function(req, res) { - var path = new Date().toISOString() + '.heapsnapshot'; - path = path.replace(/:/g, '-'); - console.info('writing dump to', path); - heapdump.writeSnapshot(path); - res.send('wrote dump to ' + path); - }); - } - */ - - // app.get('/package.json', software); - - // Allow static resources to be cached for week - var maxAge = 7 * 24 * 60 * 60 * 1000; - - if (process.env.NODE_ENV === 'development') { - maxAge = 1; - console.log('Development environment detected, setting static file cache age to 1 second'); - } - - var staticFiles = express.static(env.static_files, { - maxAge - }); - - // serve the static content - app.use(staticFiles); - // API docs const swaggerUi = require('swagger-ui-express'); diff --git a/env.js b/env.js index c753b29cd03..275271bdf26 100644 --- a/env.js +++ b/env.js @@ -14,9 +14,20 @@ var env = { settings: require('./lib/settings')() }; +var shadowEnv; + // Module to constrain all config and environment parsing to one spot. 
// See README.md for info about all the supported ENV VARs function config ( ) { + + // Assume users will typo whitespaces into keys and values + + shadowEnv = {}; + + Object.keys(process.env).forEach((key, index) => { + shadowEnv[_trim(key)] = _trim(process.env[key]); + }); + env.PORT = readENV('PORT', 1337); env.HOSTNAME = readENV('HOSTNAME', null); env.IMPORT_CONFIG = readENV('IMPORT_CONFIG', null); @@ -122,7 +133,7 @@ function updateSettings() { }); //should always find extended settings last - env.extendedSettings = findExtendedSettings(process.env); + env.extendedSettings = findExtendedSettings(shadowEnv); if (!readENVTruthy('TREATMENTS_AUTH', true)) { env.settings.authDefaultRoles = env.settings.authDefaultRoles || ""; @@ -132,10 +143,10 @@ function updateSettings() { function readENV(varName, defaultValue) { //for some reason Azure uses this prefix, maybe there is a good reason - var value = process.env['CUSTOMCONNSTR_' + varName] - || process.env['CUSTOMCONNSTR_' + varName.toLowerCase()] - || process.env[varName] - || process.env[varName.toLowerCase()]; + var value = shadowEnv['CUSTOMCONNSTR_' + varName] + || shadowEnv['CUSTOMCONNSTR_' + varName.toLowerCase()] + || shadowEnv[varName] + || shadowEnv[varName.toLowerCase()]; if (varName == 'DISPLAY_UNITS') { if (value && value.toLowerCase().includes('mmol')) { @@ -162,7 +173,7 @@ function findExtendedSettings (envs) { extended.devicestatus = {}; extended.devicestatus.advanced = true; extended.devicestatus.days = 1; - if(process.env['DEVICESTATUS_DAYS'] && process.env['DEVICESTATUS_DAYS'] == '2') extended.devicestatus.days = 1; + if(shadowEnv['DEVICESTATUS_DAYS'] && shadowEnv['DEVICESTATUS_DAYS'] == '2') extended.devicestatus.days = 1; function normalizeEnv (key) { return key.toUpperCase().replace('CUSTOMCONNSTR_', ''); diff --git a/lib/data/dataloader.js b/lib/data/dataloader.js index 0f1b3c7bd86..7074b18e1fb 100644 --- a/lib/data/dataloader.js +++ b/lib/data/dataloader.js @@ -2,7 +2,6 @@ const _ = require('lodash'); const async = require('async'); -const times = require('../times'); const fitTreatmentsToBGCurve = require('./treatmenttocurve'); const constants = require('../constants'); @@ -144,8 +143,11 @@ function init(env, ctx) { done(err, result); } - // clear treatments to the base set, we're going to merge from multiple queries - ddata.treatments = []; // ctx.cache.treatments ? 
_.cloneDeep(ctx.cache.treatments) : []; + // clear data we'll get from the cache + + ddata.treatments = []; + ddata.devicestatus = []; + ddata.entries = []; ddata.dbstats = {}; @@ -196,11 +198,8 @@ function loadEntries(ddata, ctx, callback) { if (!err && results) { - const ageFilter = ddata.lastUpdated - constants.TWO_DAYS; const r = ctx.ddata.processRawDataForRuntime(results); - ctx.cache.insertData('entries', r, ageFilter); - - const currentData = ctx.cache.getData('entries').reverse(); + const currentData = ctx.cache.insertData('entries', r).reverse(); const mbgs = []; const sgvs = []; @@ -324,12 +323,11 @@ function loadTreatments(ddata, ctx, callback) { ctx.treatments.list(tq, function(err, results) { if (!err && results) { - const ageFilter = ddata.lastUpdated - longLoad; - const r = ctx.ddata.processRawDataForRuntime(results); - // update cache - ctx.cache.insertData('treatments', r, ageFilter); - ddata.treatments = ctx.ddata.idMergePreferNew(ddata.treatments, ctx.cache.getData('treatments')); + // update cache and apply to runtime data + const r = ctx.ddata.processRawDataForRuntime(results); + const currentData = ctx.cache.insertData('treatments', r); + ddata.treatments = ctx.ddata.idMergePreferNew(ddata.treatments, currentData); } callback(); @@ -361,7 +359,6 @@ function loadProfileSwitchTreatments(ddata, ctx, callback) { ctx.treatments.list(tq, function(err, results) { if (!err && results) { ddata.treatments = mergeProcessSort(ddata.treatments, results); - //mergeToTreatments(ddata, results); } // Store last profile switch @@ -418,7 +415,6 @@ function loadLatestSingle(ddata, ctx, dataType, callback) { ctx.treatments.list(tq, function(err, results) { if (!err && results) { ddata.treatments = mergeProcessSort(ddata.treatments, results); - //mergeToTreatments(ddata, results); } callback(); }); @@ -473,16 +469,12 @@ function loadDeviceStatus(ddata, env, ctx, callback) { ctx.devicestatus.list(opts, function(err, results) { if (!err && results) { -// ctx.cache.devicestatus = mergeProcessSort(ctx.cache.devicestatus, results, ageFilter); - const ageFilter = ddata.lastUpdated - longLoad; + // update cache and apply to runtime data const r = ctx.ddata.processRawDataForRuntime(results); - ctx.cache.insertData('devicestatus', r, ageFilter); - - const res = ctx.cache.getData('devicestatus'); + const currentData = ctx.cache.insertData('devicestatus', r); - const res2 = _.map(res, function eachStatus(result) { - //result.mills = new Date(result.created_at).getTime(); + const res2 = _.map(currentData, function eachStatus(result) { if ('uploaderBattery' in result) { result.uploader = { battery: result.uploaderBattery @@ -492,7 +484,7 @@ function loadDeviceStatus(ddata, env, ctx, callback) { return result; }); - ddata.devicestatus = mergeProcessSort(ddata.devicestatus, res2, ageFilter); + ddata.devicestatus = mergeProcessSort(ddata.devicestatus, res2); } else { ddata.devicestatus = []; } diff --git a/lib/data/ddata.js b/lib/data/ddata.js index 65120782fc0..9389f10d360 100644 --- a/lib/data/ddata.js +++ b/lib/data/ddata.js @@ -32,13 +32,15 @@ function init () { Object.keys(obj).forEach(key => { if (typeof obj[key] === 'object' && obj[key]) { - if (obj[key].hasOwnProperty('_id')) { + if (Object.prototype.hasOwnProperty.call(obj[key], '_id')) { obj[key]._id = obj[key]._id.toString(); } - if (obj[key].hasOwnProperty('created_at') && !obj[key].hasOwnProperty('mills')) { + if (Object.prototype.hasOwnProperty.call(obj[key], 'created_at') + && !Object.prototype.hasOwnProperty.call(obj[key], 'mills')) { 
obj[key].mills = new Date(obj[key].created_at).getTime(); } - if (obj[key].hasOwnProperty('sysTime') && !obj[key].hasOwnProperty('mills')) { + if (Object.prototype.hasOwnProperty.call(obj[key], 'sysTime') + && !Object.prototype.hasOwnProperty.call(obj[key], 'mills')) { obj[key].mills = new Date(obj[key].sysTime).getTime(); } } diff --git a/lib/profilefunctions.js b/lib/profilefunctions.js index 0d92a7135a0..9fac4984949 100644 --- a/lib/profilefunctions.js +++ b/lib/profilefunctions.js @@ -61,6 +61,9 @@ function init (profileData) { // preprocess the timestamps to seconds for a couple orders of magnitude faster operation profile.preprocessProfileOnLoad = function preprocessProfileOnLoad (container) { _.each(container, function eachValue (value) { + + if (value === null) return; + if (Object.prototype.toString.call(value) === '[object Array]') { profile.preprocessProfileOnLoad(value); } diff --git a/lib/server/booterror.js b/lib/server/booterror.js index f08eb06db39..d8735e94451 100644 --- a/lib/server/booterror.js +++ b/lib/server/booterror.js @@ -1,22 +1,46 @@ 'use strict'; +const express = require('express'); +const path = require('path'); var _ = require('lodash'); -var head = 'Nightscout - Boot Error

</title></head><body><h1>Nightscout - Boot Error</h1><ul>';
-var tail = '</ul></body></html>
'; +function bootError(env, ctx) { -function bootError(ctx) { + const app = new express(); + let locals = {}; + + app.set('view engine', 'ejs'); + app.engine('html', require('ejs').renderFile); + app.set("views", path.join(__dirname, "../../views/")); + + app.get('*', (req, res, next) => { + + if (req.url.includes('images')) return next(); - return function pageHandler (req, res) { var errors = _.map(ctx.bootErrors, function (obj) { - obj.err = _.pick(obj.err, Object.getOwnPropertyNames(obj.err)); - return '
<li>' + obj.desc + '<br/>' + JSON.stringify(obj.err).replace(/\\n/g, '<br/>') + '</li>';
+
+      let message;
+
+      if (typeof obj.err === 'string' || obj.err instanceof String) {
+        message = obj.err;
+      } else {
+        message = JSON.stringify(_.pick(obj.err, Object.getOwnPropertyNames(obj.err)));
+      }
+      return '<li>' + obj.desc + '<br/>' + message.replace(/\\n/g, '<br/>') + '</li>
'; }).join(' '); - res.set('Content-Type', 'text/html'); - res.send(head + errors + tail); + res.render('error.html', { + errors, + locals + }); + + }); + app.setLocals = function (_locals) { + locals = _locals; } + + return app; } module.exports = bootError; \ No newline at end of file diff --git a/lib/server/bootevent.js b/lib/server/bootevent.js index 17f0319b360..6247645b3d2 100644 --- a/lib/server/bootevent.js +++ b/lib/server/bootevent.js @@ -95,6 +95,25 @@ function boot (env, language) { } } + function checkSettings (ctx, next) { + + ctx.bootErrors = ctx.bootErrors || []; + + console.log('Checking settings'); + + if (!env.storageURI) { + ctx.bootErrors.push({'desc': 'Mandatory setting missing', + err: 'MONGODB_URI setting is missing, cannot connect to database'}); + } + + if (!env.api_secret) { + ctx.bootErrors.push({'desc': 'Mandatory setting missing', + err: 'API_SECRET setting is missing, cannot enable REST API'}); + } + + next(); + } + function setupStorage (ctx, next) { if (hasBootErrors(ctx)) { @@ -107,7 +126,6 @@ function boot (env, language) { if (err) { throw err; } - ctx.store = store; console.log('OpenAPS Storage system ready'); next(); @@ -116,14 +134,18 @@ function boot (env, language) { //TODO assume mongo for now, when there are more storage options add a lookup require('../storage/mongo-storage')(env, function ready(err, store) { // FIXME, error is always null, if there is an error, the index.js will throw an exception + if (err) { + console.info('ERROR CONNECTING TO MONGO', err); + ctx.bootErrors = ctx.bootErrors || [ ]; + ctx.bootErrors.push({'desc': 'Unable to connect to Mongo', err: err}); + } console.log('Mongo Storage system ready'); ctx.store = store; - next(); }); } } catch (err) { - console.info('mongo err', err); + console.info('ERROR CONNECTING TO MONGO', err); ctx.bootErrors = ctx.bootErrors || [ ]; ctx.bootErrors.push({'desc': 'Unable to connect to Mongo', err: err}); next(); @@ -233,7 +255,7 @@ function boot (env, language) { }); ctx.bus.on('data-loaded', function updatePlugins ( ) { - console.info('reloading sandbox data'); + // console.info('reloading sandbox data'); var sbx = require('../sandbox')().serverInit(env, ctx); ctx.plugins.setProperties(sbx); ctx.notifications.initRequests(); @@ -285,6 +307,7 @@ function boot (env, language) { .acquire(checkNodeVersion) .acquire(checkEnv) .acquire(augmentSettings) + .acquire(checkSettings) .acquire(setupStorage) .acquire(setupAuthorization) .acquire(setupInternals) diff --git a/lib/server/cache.js b/lib/server/cache.js index fca93afafde..1b2576ce029 100644 --- a/lib/server/cache.js +++ b/lib/server/cache.js @@ -23,27 +23,33 @@ function cache (env, ctx) { , entries: [] }; - const dataArray = [ - data.treatments - , data.devicestatus - , data.entries - ]; + const retentionPeriods = { + treatments: constants.ONE_HOUR * 60 + , devicestatus: env.extendedSettings.devicestatus && env.extendedSettings.devicestatus.days && env.extendedSettings.devicestatus.days == 2 ? 
constants.TWO_DAYS : constants.ONE_DAY + , entries: constants.TWO_DAYS + }; + function mergeCacheArrays (oldData, newData, retentionPeriod) { - function mergeCacheArrays (oldData, newData, ageLimit) { + const ageLimit = Date.now() - retentionPeriod; - var filtered = _.filter(newData, function hasId (object) { - const hasId = !_.isEmpty(object._id); - const isFresh = (ageLimit && object.mills >= ageLimit) || (!ageLimit); - return isFresh && hasId; - }); + var filteredOld = filterForAge(oldData, ageLimit); + var filteredNew = filterForAge(newData, ageLimit); - const merged = ctx.ddata.idMergePreferNew(oldData, filtered); + const merged = ctx.ddata.idMergePreferNew(filteredOld, filteredNew); return _.sortBy(merged, function(item) { return -item.mills; }); + function filterForAge(data, ageLimit) { + return _.filter(data, function hasId(object) { + const hasId = !_.isEmpty(object._id); + const isFresh = object.mills >= ageLimit; + return isFresh && hasId; + }); + } + } data.isEmpty = (datatype) => { @@ -54,20 +60,17 @@ function cache (env, ctx) { return _.cloneDeep(data[datatype]); } - data.insertData = (datatype, newData, retentionPeriod) => { - data[datatype] = mergeCacheArrays(data[datatype], newData, retentionPeriod); + data.insertData = (datatype, newData) => { + data[datatype] = mergeCacheArrays(data[datatype], newData, retentionPeriods[datatype]); + return data.getData(datatype); } function dataChanged (operation) { - //console.log('Cache data operation requested', operation); - if (!data[operation.type]) return; if (operation.op == 'remove') { - //console.log('Cache data delete event'); // if multiple items were deleted, flush entire cache if (!operation.changes) { - //console.log('Multiple items delete from cache, flushing all') data.treatments = []; data.devicestatus = []; data.entries = []; @@ -76,9 +79,8 @@ function cache (env, ctx) { } } - if (operation.op == 'update') { - //console.log('Cache data update event'); - data[operation.type] = mergeCacheArrays(data[operation.type], operation.changes); + if (operation.op == 'update') { + data[operation.type] = mergeCacheArrays(data[operation.type], operation.changes, retentionPeriods[operation.type]); } } @@ -96,7 +98,6 @@ function cache (env, ctx) { } return data; - } module.exports = cache; diff --git a/lib/server/devicestatus.js b/lib/server/devicestatus.js index f3515367428..e228f5b372c 100644 --- a/lib/server/devicestatus.js +++ b/lib/server/devicestatus.js @@ -5,33 +5,38 @@ var find_options = require('./query'); function storage (collection, ctx) { - function create(obj, fn) { - + function create (obj, fn) { + // Normalize all dates to UTC const d = moment(obj.created_at).isValid() ? 
moment.parseZone(obj.created_at) : moment(); obj.created_at = d.toISOString(); obj.utcOffset = d.utcOffset(); - - api().insert(obj, function (err, doc) { - if (err != null && err.message) { + + api().insertOne(obj, function(err, results) { + if (err !== null && err.message) { console.log('Error inserting the device status object', err.message); fn(err.message, null); return; } - ctx.bus.emit('data-update', { - type: 'devicestatus', - op: 'update', - changes: ctx.ddata.processRawDataForRuntime([doc]) - }); + if (!err) { + + if (!obj._id) obj._id = results.insertedIds[0]._id; - fn(null, doc.ops); + ctx.bus.emit('data-update', { + type: 'devicestatus' + , op: 'update' + , changes: ctx.ddata.processRawDataForRuntime([obj]) + }); + } + + fn(null, results.ops); ctx.bus.emit('data-received'); }); } - function last(fn) { - return list({count: 1}, function (err, entries) { + function last (fn) { + return list({ count: 1 }, function(err, entries) { if (entries && entries.length > 0) { fn(err, entries[0]); } else { @@ -44,18 +49,18 @@ function storage (collection, ctx) { return find_options(opts, storage.queryOpts); } - function list(opts, fn) { + function list (opts, fn) { // these functions, find, sort, and limit, are used to // dynamically configure the request, based on the options we've // been given // determine sort options - function sort ( ) { - return opts && opts.sort || {created_at: -1}; + function sort () { + return opts && opts.sort || { created_at: -1 }; } // configure the limit portion of the current query - function limit ( ) { + function limit () { if (opts && opts.count) { return this.limit(parseInt(opts.count)); } @@ -68,31 +73,31 @@ function storage (collection, ctx) { } // now just stitch them all together - limit.call(api( ) - .find(query_for(opts)) - .sort(sort( )) + limit.call(api() + .find(query_for(opts)) + .sort(sort()) ).toArray(toArray); } function remove (opts, fn) { - function removed(err, stat) { + function removed (err, stat) { ctx.bus.emit('data-update', { - type: 'devicestatus', - op: 'remove', - count: stat.result.n, - changes: opts.find._id + type: 'devicestatus' + , op: 'remove' + , count: stat.result.n + , changes: opts.find._id }); fn(err, stat); } - return api( ).remove( + return api().remove( query_for(opts), removed); } - function api() { + function api () { return ctx.store.collection(collection); } @@ -101,9 +106,10 @@ function storage (collection, ctx) { api.query_for = query_for; api.last = last; api.remove = remove; - api.aggregate = require('./aggregate')({ }, api); + api.aggregate = require('./aggregate')({}, api); api.indexedFields = [ 'created_at' + , 'NSCLIENT_ID' ]; return api; diff --git a/lib/server/entries.js b/lib/server/entries.js index f6b61024e7d..50c8e0cc41a 100644 --- a/lib/server/entries.js +++ b/lib/server/entries.js @@ -10,60 +10,60 @@ var moment = require('moment'); * Encapsulate persistent storage of sgv entries. \**********/ -function storage(env, ctx) { +function storage (env, ctx) { // TODO: Code is a little redundant. 
// query for entries from storage function list (opts, fn) { - // these functions, find, sort, and limit, are used to - // dynamically configure the request, based on the options we've - // been given + // these functions, find, sort, and limit, are used to + // dynamically configure the request, based on the options we've + // been given - // determine sort options - function sort ( ) { - return opts && opts.sort || {date: -1}; - } + // determine sort options + function sort () { + return opts && opts.sort || { date: -1 }; + } - // configure the limit portion of the current query - function limit ( ) { - if (opts && opts.count) { - return this.limit(parseInt(opts.count)); - } - return this; + // configure the limit portion of the current query + function limit () { + if (opts && opts.count) { + return this.limit(parseInt(opts.count)); } + return this; + } - // handle all the results - function toArray (err, entries) { - fn(err, entries); - } + // handle all the results + function toArray (err, entries) { + fn(err, entries); + } - // now just stitch them all together - limit.call(api( ) - .find(query_for(opts)) - .sort(sort( )) - ).toArray(toArray); + // now just stitch them all together + limit.call(api() + .find(query_for(opts)) + .sort(sort()) + ).toArray(toArray); } function remove (opts, fn) { - api( ).remove(query_for(opts), function (err, stat) { + api().remove(query_for(opts), function(err, stat) { ctx.bus.emit('data-update', { - type: 'entries', - op: 'remove', - count: stat.result.n, - changes: opts.find._id + type: 'entries' + , op: 'remove' + , count: stat.result.n + , changes: opts.find._id }); //TODO: this is triggering a read from Mongo, we can do better - ctx.bus.emit('data-received'); - fn(err, stat); - }); + ctx.bus.emit('data-received'); + fn(err, stat); + }); } // return writable stream to lint each sgv record passing through it // TODO: get rid of this? not doing anything now - function map ( ) { + function map () { return es.map(function iter (item, next) { return next(null, item); }); @@ -80,7 +80,7 @@ function storage(env, ctx) { create(result, fn); } // lint and store the entire list - return es.pipeline(map( ), es.writeArray(done)); + return es.pipeline(map(), es.writeArray(done)); } //TODO: implement @@ -91,9 +91,9 @@ function storage(env, ctx) { // store new documents using the storage mechanism function create (docs, fn) { // potentially a batch insert - var firstErr = null, - numDocs = docs.length, - totalCreated = 0; + var firstErr = null + , numDocs = docs.length + , totalCreated = 0; docs.forEach(function(doc) { @@ -106,15 +106,21 @@ function storage(env, ctx) { doc.sysTime = _sysTime.toISOString(); if (doc.dateString) doc.dateString = doc.sysTime; - var query = (doc.sysTime && doc.type) ? {sysTime: doc.sysTime, type: doc.type} : doc; - api( ).update(query, doc, {upsert: true}, function (err) { + var query = (doc.sysTime && doc.type) ? 
{ sysTime: doc.sysTime, type: doc.type } : doc; + api().update(query, doc, { upsert: true }, function(err, updateResults) { firstErr = firstErr || err; - ctx.bus.emit('data-update', { - type: 'entries', - op: 'update', - changes: ctx.ddata.processRawDataForRuntime([doc]) - }); + if (!err) { + if (updateResults.result.upserted) { + doc._id = updateResults.result.upserted[0]._id + } + + ctx.bus.emit('data-update', { + type: 'entries' + , op: 'update' + , changes: ctx.ddata.processRawDataForRuntime([doc]) + }); + } if (++totalCreated === numDocs) { //TODO: this is triggering a read from Mongo, we can do better @@ -125,8 +131,8 @@ function storage(env, ctx) { }); } - function getEntry(id, fn) { - api( ).findOne({_id: ObjectID(id)}, function (err, entry) { + function getEntry (id, fn) { + api().findOne({ _id: ObjectID(id) }, function(err, entry) { if (err) { fn(err); } else { @@ -140,7 +146,7 @@ function storage(env, ctx) { } // closure to represent the API - function api ( ) { + function api () { // obtain handle usable for querying the collection associated // with these records return ctx.store.collection(env.entries_collection); @@ -154,15 +160,21 @@ function storage(env, ctx) { api.persist = persist; api.query_for = query_for; api.getEntry = getEntry; - api.aggregate = require('./aggregate')({ }, api); + api.aggregate = require('./aggregate')({}, api); api.indexedFields = [ 'date' + , 'type' + , 'sgv' + , 'mbg' + , 'sysTime' + , 'dateString' - , { 'type' : 1, 'date' : -1, 'dateString' : 1 } + + , { 'type': 1, 'date': -1, 'dateString': 1 } ]; return api; } @@ -176,7 +188,7 @@ storage.queryOpts = { , rssi: parseInt , noise: parseInt , mbg: parseInt - } + } , useEpoch: true }; diff --git a/lib/server/pebble.js b/lib/server/pebble.js index 544aa878268..97b756a9b84 100644 --- a/lib/server/pebble.js +++ b/lib/server/pebble.js @@ -176,7 +176,7 @@ function configure (env, ctx) { req.rawbg = env.settings.isEnabled('rawbg'); req.iob = env.settings.isEnabled('iob'); req.cob = env.settings.isEnabled('cob'); - req.mmol = (req.query.units || env.DISPLAY_UNITS) === 'mmol'; + req.mmol = (req.query.units || env.settings.units) === 'mmol'; req.count = parseInt(req.query.count) || 1; next( ); diff --git a/lib/server/treatments.js b/lib/server/treatments.js index dad13b5b5d6..a9107ea99b2 100644 --- a/lib/server/treatments.js +++ b/lib/server/treatments.js @@ -53,9 +53,7 @@ function storage (env, ctx) { if (!err) { if (updateResults.result.upserted) { obj._id = updateResults.result.upserted[0]._id - //console.log('PERSISTENCE: treatment upserted', updateResults.result.upserted[0]); } - //console.log('Update result', updateResults.result); } // TODO document this feature @@ -72,7 +70,14 @@ function storage (env, ctx) { } query.created_at = pbTreat.created_at; - api( ).update(query, pbTreat, {upsert: true}, function pbComplete (err) { + api( ).update(query, pbTreat, {upsert: true}, function pbComplete (err, updateResults) { + + if (!err) { + if (updateResults.result.upserted) { + pbTreat._id = updateResults.result.upserted[0]._id + } + } + var treatments = _.compact([obj, pbTreat]); ctx.bus.emit('data-update', { diff --git a/lib/settings.js b/lib/settings.js index 567e5d3f249..845cea07628 100644 --- a/lib/settings.js +++ b/lib/settings.js @@ -206,7 +206,9 @@ function init () { function getAndPrepare (key) { var raw = accessor(nameFromKey(key, nameType)) || ''; var cleaned = decodeURIComponent(raw).toLowerCase(); - return cleaned ? cleaned.split(' ') : []; + cleaned = cleaned ? 
cleaned.split(' ') : []; + cleaned = _.filter(cleaned, function(e) { return e !== ""; } ); + return cleaned; } function enableIf (feature, condition) { diff --git a/lib/storage/mongo-storage.js b/lib/storage/mongo-storage.js index 49c69b6d0f2..39c1a81e641 100644 --- a/lib/storage/mongo-storage.js +++ b/lib/storage/mongo-storage.js @@ -1,12 +1,11 @@ 'use strict'; var mongodb = require('mongodb'); - var connection = null; +var MongoClient = mongodb.MongoClient; +var mongo = {}; function init (env, cb, forceNewConnection) { - var MongoClient = mongodb.MongoClient; - var mongo = {}; function maybe_connect (cb) { @@ -20,63 +19,76 @@ function init (env, cb, forceNewConnection) { } } else { if (!env.storageURI) { - throw new Error('MongoDB connection string is missing. Please set MONGO_CONNECTION environment variable'); + throw new Error('MongoDB connection string is missing. Please set MONGODB_URI environment variable'); } console.log('Setting up new connection to MongoDB'); - var timeout = 30 * 1000; - var options = { reconnectInterval: 10000, reconnectTries: 500, connectTimeoutMS: timeout, - socketTimeoutMS: timeout, useNewUrlParser: true }; - - var connect_with_retry = async function(i) { - - try { - const client = await MongoClient.connect(env.storageURI, options); - console.log('Successfully established a connected to MongoDB'); - - var dbName = env.storageURI.split('/').pop().split('?'); - dbName=dbName[0]; // drop Connection Options - mongo.db = client.db(dbName); - connection = mongo.db; - mongo.client = client; - // If there is a valid callback, then invoke the function to perform the callback - - if (cb && cb.call) { - cb(null, mongo); - } - } catch (err) { - console.log('Mongo Error', err); - if (err.name && err.name === "MongoNetworkError") { - var timeout = (i > 15) ? 60000 : i*3000; - console.log('Error connecting to MongoDB: %j - retrying in ' + timeout/1000 + ' sec', err); - setTimeout(connect_with_retry, timeout, i+1); - } else { - throw new Error('MongoDB connection string '+env.storageURI+' seems invalid: '+ err.message) ; - } - } - + var timeout = 30 * 1000; + var options = { + reconnectInterval: 10000 + , reconnectTries: 500 + , connectTimeoutMS: timeout + , socketTimeoutMS: timeout + , useNewUrlParser: true }; - return connect_with_retry(1); + var connect_with_retry = function(i) { + + MongoClient.connect(env.storageURI, options) + .then(client => { + console.log('Successfully established a connected to MongoDB'); + + var dbName = env.storageURI.split('/').pop().split('?'); + dbName = dbName[0]; // drop Connection Options + mongo.db = client.db(dbName); + connection = mongo.db; + mongo.client = client; + // If there is a valid callback, then invoke the function to perform the callback + + if (cb && cb.call) { + cb(null, mongo); + } + }) + .catch(err => { + if (err.message && err.message.includes('AuthenticationFailed')) { + console.log('Authentication to Mongo failed'); + cb(new Error('MongoDB authentication failed! Double check the URL has the right username and password in MONGODB_URI.'), null); + return; + } + + if (err.name && err.name === "MongoNetworkError") { + var timeout = (i > 15) ? 60000 : i * 3000; + console.log('Error connecting to MongoDB: %j - retrying in ' + timeout / 1000 + ' sec', err); + setTimeout(connect_with_retry, timeout, i + 1); + if (i == 1) cb(new Error('MongoDB connection failed! 
Double check the MONGODB_URI setting in Heroku.'), null); + } else { + cb(new Error('MONGODB_URI ' + env.storageURI + ' seems invalid: ' + err.message)); + } + }); + + }; + + return connect_with_retry(1); + + } } - } - mongo.collection = function get_collection (name) { - return connection.collection(name); - }; - - mongo.ensureIndexes = function ensureIndexes (collection, fields) { - fields.forEach(function (field) { - console.info('ensuring index for: ' + field); - collection.createIndex(field, { 'background': true }, function (err) { - if (err) { - console.error('unable to ensureIndex for: ' + field + ' - ' + err); - } + mongo.collection = function get_collection (name) { + return connection.collection(name); + }; + + mongo.ensureIndexes = function ensureIndexes (collection, fields) { + fields.forEach(function(field) { + console.info('ensuring index for: ' + field); + collection.createIndex(field, { 'background': true }, function(err) { + if (err) { + console.error('unable to ensureIndex for: ' + field + ' - ' + err); + } + }); }); - }); - }; + }; - return maybe_connect(cb); -} + return maybe_connect(cb); + } -module.exports = init; + module.exports = init; diff --git a/npm-shrinkwrap.json b/npm-shrinkwrap.json index 891da5af29c..3ba1035c896 100644 --- a/npm-shrinkwrap.json +++ b/npm-shrinkwrap.json @@ -1,6 +1,6 @@ { "name": "nightscout", - "version": "14.0.4", + "version": "14.0.5", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 6a24379ef16..7ae6694563a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "nightscout", - "version": "14.0.4", + "version": "14.0.5", "description": "Nightscout acts as a web-based CGM (Continuous Glucose Montinor) to allow multiple caregivers to remotely view a patients glucose data in realtime.", "license": "AGPL-3.0", "author": "Nightscout Team", diff --git a/static/images/errorcat.jpg b/static/images/errorcat.jpg new file mode 100644 index 00000000000..15c1fb058b0 Binary files /dev/null and b/static/images/errorcat.jpg differ diff --git a/swagger.json b/swagger.json index ffed8fd699c..5d3504ecf60 100755 --- a/swagger.json +++ b/swagger.json @@ -8,7 +8,7 @@ "info": { "title": "Nightscout API", "description": "Own your DData with the Nightscout API", - "version": "14.0.4", + "version": "14.0.5", "license": { "name": "AGPL 3", "url": "https://www.gnu.org/licenses/agpl.txt" diff --git a/swagger.yaml b/swagger.yaml index ce94fe876db..1bd4a02a964 100755 --- a/swagger.yaml +++ b/swagger.yaml @@ -4,7 +4,7 @@ servers: info: title: Nightscout API description: Own your DData with the Nightscout API - version: 14.0.4 + version: 14.0.5 license: name: AGPL 3 url: 'https://www.gnu.org/licenses/agpl.txt' diff --git a/tests/mongo-storage.test.js b/tests/mongo-storage.test.js index 40699946e22..ee706906ff8 100644 --- a/tests/mongo-storage.test.js +++ b/tests/mongo-storage.test.js @@ -36,7 +36,7 @@ describe('mongo storage', function () { (function () { return require('../lib/storage/mongo-storage')(env, false, true); - }).should.throw('MongoDB connection string is missing. Please set MONGO_CONNECTION environment variable'); + }).should.throw('MongoDB connection string is missing. 
Please set MONGODB_URI environment variable'); done(); }); diff --git a/tests/security.test.js b/tests/security.test.js index 6b612f1bc6e..1f182970019 100644 --- a/tests/security.test.js +++ b/tests/security.test.js @@ -2,14 +2,14 @@ var request = require('supertest'); var should = require('should'); -var load = require('./fixtures/load'); var language = require('../lib/language')(); describe('API_SECRET', function ( ) { - var api = require('../lib/api/'); - + var api; var scope = this; + function setup_app (env, fn) { + api = require('../lib/api/'); require('../lib/server/bootevent')(env, language).boot(function booted (ctx) { ctx.app = api(env, ctx); scope.app = ctx.app; @@ -18,21 +18,6 @@ describe('API_SECRET', function ( ) { }); } - it('should work fine absent', function (done) { - delete process.env.API_SECRET; - var env = require('../env')( ); - should.not.exist(env.api_secret); - setup_app(env, function (ctx) { - - ctx.app.enabled('api').should.equal(false); - ping_status(ctx.app, again); - function again ( ) { - ping_authorized_endpoint(ctx.app, 404, done); - } - }); - }); - - it('should work fail set unauthorized', function (done) { var known = 'b723e97aa97846eb92d5264f084b2823f57c4aa1'; delete process.env.API_SECRET; diff --git a/views/error.html b/views/error.html new file mode 100644 index 00000000000..580618c4071 --- /dev/null +++ b/views/error.html @@ -0,0 +1,64 @@ + + + + + + + + Nightscout - Boot error + + + + + + + + + + + + + + + + + + + + + + +
+</head>
+
+<body>
+
+  <img src="/images/errorcat.jpg" alt="Error cat">
+
+  <h1>Oops - Nightscout is having trouble</h1>
+
+  <p>Don't panic, we can work this out! This happens to the best of us.</p>
+
+  <p>Check the errors below and then refer to the
+    <a href="https://nightscout.github.io">troubleshooting documentation</a>.</p>
+
+  <h2>Errors occurred during startup:</h2>
+
+  <ul>
+    <%- errors %>
+  </ul>
+
+</body>
+
+</html>
diff --git a/views/service-worker.js b/views/service-worker.js index 0711889012b..d52441d303a 100644 --- a/views/service-worker.js +++ b/views/service-worker.js @@ -3,7 +3,6 @@ var CACHE = '<%= locals.cachebuster %>'; const CACHE_LIST = [ - '/', '/images/launch.png', '/images/apple-touch-icon-57x57.png', '/images/apple-touch-icon-60x60.png',