Browse Source

!!! MASSIVE OVERHAUL !!!

As the title says, this commit is a massive overhaul.
I've rewritten/restructured almost everything in the controller scripts.
Because of that, there's a considerable possibility that I've broken
something somewhere.

Notable changes:

Added temporary uploads.

Removed file name length changer from dashboard,
in favor of an equivalent in homepage config tab.
This allows non-registered users to also set file name length.

A bunch of other undocumented stuff.
I don't know, I'm too tired to remember them all.
Bobby Wibowo 1 month ago
parent
commit
02e2e402c3

+ 3 - 1
.eslintrc.json

@@ -15,6 +15,8 @@
       "multi",
       "consistent"
     ],
+    "no-throw-literal": 0,
+    "no-var": "error",
     "prefer-const": [
       "error",
       {
@@ -30,6 +32,6 @@
       "error",
       "single"
     ],
-    "no-var": "error"
+    "standard/no-callback-literal": 0
   }
 }

+ 35 - 4
config.sample.js

@@ -196,6 +196,38 @@ module.exports = {
     */
     urlExtensionsFilter: [],
 
+    /*
+      An array of allowed ages for uploads (in hours).
+
+      Default age will be the value at the very top of the array.
+      If the array is populated but does not have a zero value,
+      permanent uploads will be rejected.
+      This only applies to new files uploaded after enabling the option.
+
+      If the array is empty or is set to falsy value, temporary uploads
+      feature will be disabled, and all uploads will be permanent (original behavior).
+
+      When temporary uploads feature is disabled, any existing temporary uploads
+      will not ever be automatically deleted, since the safe will not start the
+      periodical checkup task.
+    */
+    temporaryUploadAges: [
+      0, // permanent
+      1 / 60 * 15, // 15 minutes
+      1 / 60 * 30, // 30 minutes
+      1, // 1 hour
+      6, // 6 hours
+      12, // 12 hours
+      24, // 24 hours (1 day)
+      168 // 168 hours (7 days)
+    ],
+
+    /*
+      Interval of the periodic checkup task for temporary uploads (in milliseconds).
+      NOTE: Set to falsy value if you prefer to use your own external script.
+    */
+    temporaryUploadsInterval: 1 * 60000, // 1 minute
+
     /*
       Scan files using ClamAV through clamd.
     */
@@ -248,10 +280,9 @@ module.exports = {
       may not be used by more than a single file (e.i. if "abcd.jpg" already exists, a new PNG
       file may not be named as "abcd.png").
 
-      If this is enabled, the safe will then attempt to read file list of the uploads directory
-      during first launch, parse the names, then cache the identifiers into memory.
-      Its downside is that it will use a bit more memory, generally a few MBs increase
-      on a safe with over >10k uploads.
+      If this is enabled, the safe will query files from the database during first launch,
+      parse their names, then cache the identifiers into memory.
+      Its downside is that it will use a bit more memory.
 
       If this is disabled, collision check will become less strict.
       As in, the same identifier may be used by multiple different extensions (e.i. if "abcd.jpg"

+ 359 - 313
controllers/albumsController.js

@@ -4,40 +4,68 @@ const EventEmitter = require('events')
 const fs = require('fs')
 const logger = require('./../logger')
 const path = require('path')
+const paths = require('./pathsController')
 const randomstring = require('randomstring')
 const utils = require('./utilsController')
 const Zip = require('jszip')
 
-const albumsController = {}
+const self = {
+  onHold: new Set()
+}
 
-const maxTries = config.uploads.maxTries || 1
 const homeDomain = config.homeDomain || config.domain
-const uploadsDir = path.resolve(config.uploads.folder)
-const zipsDir = path.join(uploadsDir, 'zips')
-const zipMaxTotalSize = config.cloudflare.zipMaxTotalSize
-const zipMaxTotalSizeBytes = parseInt(config.cloudflare.zipMaxTotalSize) * 1000000
+
+const zipMaxTotalSize = parseInt(config.cloudflare.zipMaxTotalSize)
+const zipMaxTotalSizeBytes = config.cloudflare.zipMaxTotalSize * 1000000
 const zipOptions = config.uploads.jsZipOptions
 
 // Force 'type' option to 'nodebuffer'
 zipOptions.type = 'nodebuffer'
 
 // Apply fallbacks for missing config values
-if (zipOptions.streamFiles === undefined) zipOptions.streamFiles = true
-if (zipOptions.compression === undefined) zipOptions.compression = 'DEFLATE'
+if (zipOptions.streamFiles === undefined)
+  zipOptions.streamFiles = true
+if (zipOptions.compression === undefined)
+  zipOptions.compression = 'DEFLATE'
 if (zipOptions.compressionOptions === undefined || zipOptions.compressionOptions.level === undefined)
   zipOptions.compressionOptions = { level: 1 }
 
-albumsController.zipEmitters = new Map()
+self.zipEmitters = new Map()
 
 class ZipEmitter extends EventEmitter {
   constructor (identifier) {
     super()
     this.identifier = identifier
-    this.once('done', () => albumsController.zipEmitters.delete(this.identifier))
+    this.once('done', () => self.zipEmitters.delete(this.identifier))
+  }
+}
+
+self.getUniqueRandomName = async () => {
+  for (let i = 0; i < utils.idMaxTries; i++) {
+    const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
+    if (self.onHold.has(identifier))
+      continue
+
+    // Put token on-hold (wait for it to be inserted to DB)
+    self.onHold.add(identifier)
+
+    const album = await db.table('albums')
+      .where('identifier', identifier)
+      .select('id')
+      .first()
+    if (album) {
+      self.onHold.delete(identifier)
+      logger.log(`Album with identifier ${identifier} already exists (${i + 1}/${utils.idMaxTries}).`)
+      continue
+    }
+
+    return identifier
   }
+
+  throw 'Sorry, we could not allocate a unique random identifier. Try again?'
 }
 
-albumsController.list = async (req, res, next) => {
+self.list = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -55,88 +83,77 @@ albumsController.list = async (req, res, next) => {
   if (req.params.sidebar !== undefined)
     return res.json({ success: true, albums })
 
-  const ids = []
+  const albumids = {}
   for (const album of albums) {
     album.download = album.download !== 0
     album.public = album.public !== 0
-
-    ids.push(album.id)
+    album.files = 0
+    // Map by IDs
+    albumids[album.id] = album
   }
 
   const files = await db.table('files')
-    .whereIn('albumid', ids)
+    .whereIn('albumid', Object.keys(albumids))
     .select('albumid')
-  const albumsCount = {}
 
-  for (const id of ids) albumsCount[id] = 0
-  for (const file of files) albumsCount[file.albumid] += 1
-  for (const album of albums) album.files = albumsCount[album.id]
+  // Increment files count
+  for (const file of files)
+    if (albumids[file.albumid])
+      albumids[file.albumid].files++
 
   return res.json({ success: true, albums, homeDomain })
 }
 
-albumsController.create = async (req, res, next) => {
+self.create = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
-  const name = utils.escape(req.body.name)
-  if (name === undefined || name === '')
+  const name = typeof req.body.name === 'string'
+    ? utils.escape(req.body.name.trim())
+    : ''
+
+  if (!name)
     return res.json({ success: false, description: 'No album name specified.' })
 
-  const album = await db.table('albums')
-    .where({
-      name,
-      enabled: 1,
-      userid: user.id
-    })
-    .first()
+  try {
+    const album = await db.table('albums')
+      .where({
+        name,
+        enabled: 1,
+        userid: user.id
+      })
+      .first()
 
-  if (album)
-    return res.json({ success: false, description: 'There\'s already an album with that name.' })
+    if (album)
+      return res.json({ success: false, description: 'There is already an album with that name.' })
 
-  const identifier = await albumsController.getUniqueRandomName()
-    .catch(error => {
-      res.json({ success: false, description: error.toString() })
+    const identifier = await self.getUniqueRandomName()
+
+    const ids = await db.table('albums').insert({
+      name,
+      enabled: 1,
+      userid: user.id,
+      identifier,
+      timestamp: Math.floor(Date.now() / 1000),
+      editedAt: 0,
+      zipGeneratedAt: 0,
+      download: (req.body.download === false || req.body.download === 0) ? 0 : 1,
+      public: (req.body.public === false || req.body.public === 0) ? 0 : 1,
+      description: typeof req.body.description === 'string'
+        ? utils.escape(req.body.description.trim())
+        : ''
     })
-  if (!identifier) return
-
-  const ids = await db.table('albums').insert({
-    name,
-    enabled: 1,
-    userid: user.id,
-    identifier,
-    timestamp: Math.floor(Date.now() / 1000),
-    editedAt: 0,
-    zipGeneratedAt: 0,
-    download: (req.body.download === false || req.body.download === 0) ? 0 : 1,
-    public: (req.body.public === false || req.body.public === 0) ? 0 : 1,
-    description: utils.escape(req.body.description) || ''
-  })
-  utils.invalidateStatsCache('albums')
-
-  return res.json({ success: true, id: ids[0] })
-}
+    utils.invalidateStatsCache('albums')
+    self.onHold.delete(identifier)
 
-albumsController.getUniqueRandomName = () => {
-  return new Promise((resolve, reject) => {
-    const select = i => {
-      const identifier = randomstring.generate(config.uploads.albumIdentifierLength)
-      db.table('albums')
-        .where('identifier', identifier)
-        .then(rows => {
-          if (!rows || !rows.length) return resolve(identifier)
-          logger.log(`An album with identifier ${identifier} already exists (${++i}/${maxTries}).`)
-          if (i < maxTries) return select(i)
-          // eslint-disable-next-line prefer-promise-reject-errors
-          return reject('Sorry, we could not allocate a unique random identifier. Try again?')
-        })
-    }
-    // Get us a unique random identifier
-    select(0)
-  })
+    return res.json({ success: true, id: ids[0] })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-albumsController.delete = async (req, res, next) => {
+self.delete = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -145,52 +162,51 @@ albumsController.delete = async (req, res, next) => {
   if (id === undefined || id === '')
     return res.json({ success: false, description: 'No album specified.' })
 
-  let failed = []
-  if (purge) {
-    const files = await db.table('files')
-      .where({
-        albumid: id,
-        userid: user.id
-      })
-
-    if (files.length) {
-      const ids = files.map(file => file.id)
-      failed = await utils.bulkDeleteFiles('id', ids, user)
+  try {
+    if (purge) {
+      const files = await db.table('files')
+        .where({
+          albumid: id,
+          userid: user.id
+        })
 
-      if (failed.length === ids.length)
-        return res.json({ success: false, description: 'Could not delete any of the files associated with the album.' })
+      if (files.length) {
+        const ids = files.map(file => file.id)
+        const failed = await utils.bulkDeleteFromDb('id', ids, user)
+        if (failed.length)
+          return res.json({ success: false, failed })
+      }
     }
-  }
 
-  await db.table('albums')
-    .where({
-      id,
-      userid: user.id
-    })
-    .update('enabled', 0)
-  utils.invalidateStatsCache('albums')
+    await db.table('albums')
+      .where({
+        id,
+        userid: user.id
+      })
+      .update('enabled', 0)
+    utils.invalidateStatsCache('albums')
 
-  const identifier = await db.table('albums')
-    .select('identifier')
-    .where({
-      id,
-      userid: user.id
-    })
-    .first()
-    .then(row => row.identifier)
+    const identifier = await db.table('albums')
+      .select('identifier')
+      .where({
+        id,
+        userid: user.id
+      })
+      .first()
+      .then(row => row.identifier)
 
-  // Unlink zip archive of the album if it exists
-  const zipPath = path.join(zipsDir, `${identifier}.zip`)
-  fs.unlink(zipPath, error => {
+    await paths.unlink(path.join(paths.zips, `${identifier}.zip`))
+  } catch (error) {
     if (error && error.code !== 'ENOENT') {
       logger.error(error)
-      return res.json({ success: false, description: error.toString(), failed })
+      return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
     }
-    res.json({ success: true, failed })
-  })
+  }
+
+  return res.json({ success: true })
 }
 
-albumsController.edit = async (req, res, next) => {
+self.edit = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -198,127 +214,139 @@ albumsController.edit = async (req, res, next) => {
   if (isNaN(id))
     return res.json({ success: false, description: 'No album specified.' })
 
-  const name = utils.escape(req.body.name)
-  if (name === undefined || name === '')
-    return res.json({ success: false, description: 'No name specified.' })
+  const name = typeof req.body.name === 'string'
+    ? utils.escape(req.body.name.trim())
+    : ''
 
-  const album = await db.table('albums')
-    .where({
-      id,
-      userid: user.id,
-      enabled: 1
-    })
-    .first()
+  if (!name)
+    return res.json({ success: false, description: 'No name specified.' })
 
-  if (!album)
-    return res.json({ success: false, description: 'Could not get album with the specified ID.' })
-  else if (album.id !== id)
-    return res.json({ success: false, description: 'Name already in use.' })
-  else if (req._old && (album.id === id))
-    // Old rename API
-    return res.json({ success: false, description: 'You did not specify a new name.' })
+  try {
+    const album = await db.table('albums')
+      .where({
+        id,
+        userid: user.id,
+        enabled: 1
+      })
+      .first()
 
-  await db.table('albums')
-    .where({
-      id,
-      userid: user.id
-    })
-    .update({
-      name,
-      download: Boolean(req.body.download),
-      public: Boolean(req.body.public),
-      description: utils.escape(req.body.description) || ''
-    })
-  utils.invalidateStatsCache('albums')
+    if (!album)
+      return res.json({ success: false, description: 'Could not get album with the specified ID.' })
+    else if (album.id !== id)
+      return res.json({ success: false, description: 'Name already in use.' })
+    else if (req._old && (album.id === id))
+      // Old rename API
+      return res.json({ success: false, description: 'You did not specify a new name.' })
 
-  if (req.body.requestLink) {
-    const oldIdentifier = await db.table('albums')
-      .select('identifier')
+    await db.table('albums')
       .where({
         id,
         userid: user.id
       })
-      .first()
-      .then(row => row.identifier)
-
-    const identifier = await albumsController.getUniqueRandomName()
-      .catch(error => {
-        res.json({ success: false, description: error.toString() })
+      .update({
+        name,
+        download: Boolean(req.body.download),
+        public: Boolean(req.body.public),
+        description: typeof req.body.description === 'string'
+          ? utils.escape(req.body.description.trim())
+          : ''
       })
-    if (!identifier) return
+    utils.invalidateStatsCache('albums')
+
+    if (!req.body.requestLink)
+      return res.json({ success: true, name })
+
+    const oldIdentifier = album.identifier
+    const newIdentifier = await self.getUniqueRandomName()
 
     await db.table('albums')
       .where({
         id,
         userid: user.id
       })
-      .update('identifier', identifier)
+      .update('identifier', newIdentifier)
+    utils.invalidateStatsCache('albums')
+    self.onHold.delete(newIdentifier)
 
     // Rename zip archive of the album if it exists
-    const zipPath = path.join(zipsDir, `${oldIdentifier}.zip`)
-    return fs.access(zipPath, error => {
-      if (error) return res.json({ success: true, identifier })
-      fs.rename(zipPath, path.join(zipsDir, `${identifier}.zip`), error => {
-        if (!error) return res.json({ success: true, identifier })
-        logger.error(error)
-        res.json({ success: false, description: error.toString() })
-      })
+    try {
+      const oldZip = path.join(paths.zips, `${oldIdentifier}.zip`)
+      // await paths.access(oldZip)
+      const newZip = path.join(paths.zips, `${newIdentifier}.zip`)
+      await paths.rename(oldZip, newZip)
+    } catch (err) {
+      // Re-throw error
+      if (err.code !== 'ENOENT')
+        throw err
+    }
+
+    return res.json({
+      success: true,
+      identifier: newIdentifier
     })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
   }
-
-  return res.json({ success: true, name })
 }
 
-albumsController.rename = async (req, res, next) => {
+self.rename = async (req, res, next) => {
   req._old = true
   req.body = { name: req.body.name }
-  return albumsController.edit(req, res, next)
+  return self.edit(req, res, next)
 }
 
-albumsController.get = async (req, res, next) => {
-  // TODO: Something, can't remember...
+self.get = async (req, res, next) => {
   const identifier = req.params.identifier
   if (identifier === undefined)
     return res.status(401).json({ success: false, description: 'No identifier provided.' })
 
-  const album = await db.table('albums')
-    .where({
-      identifier,
-      enabled: 1
-    })
-    .first()
+  try {
+    const album = await db.table('albums')
+      .where({
+        identifier,
+        enabled: 1
+      })
+      .first()
 
-  if (!album)
-    return res.json({ success: false, description: 'Album not found.' })
-  else if (album.public === 0)
-    return res.status(401).json({
-      success: false,
-      description: 'This album is not available for public.'
-    })
+    if (!album)
+      return res.json({
+        success: false,
+        description: 'Album not found.'
+      })
+    else if (album.public === 0)
+      return res.status(403).json({
+        success: false,
+        description: 'This album is not available for public.'
+      })
 
-  const title = album.name
-  const files = await db.table('files')
-    .select('name')
-    .where('albumid', album.id)
-    .orderBy('id', 'DESC')
+    const title = album.name
+    const files = await db.table('files')
+      .select('name')
+      .where('albumid', album.id)
+      .orderBy('id', 'DESC')
 
-  for (const file of files) {
-    file.file = `${config.domain}/${file.name}`
+    for (const file of files) {
+      file.file = `${config.domain}/${file.name}`
 
-    const extname = utils.extname(file.name)
-    if (utils.mayGenerateThumb(extname))
-      file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
-  }
+      const extname = utils.extname(file.name)
+      if (utils.mayGenerateThumb(extname))
+        file.thumb = `${config.domain}/thumbs/${file.name.slice(0, -extname.length)}.png`
+    }
 
-  return res.json({
-    success: true,
-    title,
-    count: files.length,
-    files
-  })
+    return res.json({
+      success: true,
+      title,
+      count: files.length,
+      files
+    })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occcured. Try again?' })
+  }
 }
 
-albumsController.generateZip = async (req, res, next) => {
+self.generateZip = async (req, res, next) => {
   const versionString = parseInt(req.query.v)
   const download = (filePath, fileName) => {
     const headers = {}
@@ -337,160 +365,178 @@ albumsController.generateZip = async (req, res, next) => {
     })
 
   if (!config.uploads.generateZips)
-    return res.status(401).json({ success: false, description: 'Zip generation disabled.' })
-
-  const album = await db.table('albums')
-    .where({
-      identifier,
-      enabled: 1
+    return res.status(401).json({
+      success: false,
+      description: 'Zip generation disabled.'
     })
-    .first()
-
-  if (!album)
-    return res.json({ success: false, description: 'Album not found.' })
-  else if (album.download === 0)
-    return res.json({ success: false, description: 'Download for this album is disabled.' })
-
-  if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
-    return res.redirect(`${album.identifier}?v=${album.editedAt}`)
-
-  if (album.zipGeneratedAt > album.editedAt) {
-    const filePath = path.join(zipsDir, `${identifier}.zip`)
-    const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
-    if (exists) {
-      const fileName = `${album.name}.zip`
-      return download(filePath, fileName)
+
+  try {
+    const album = await db.table('albums')
+      .where({
+        identifier,
+        enabled: 1
+      })
+      .first()
+
+    if (!album)
+      return res.json({ success: false, description: 'Album not found.' })
+    else if (album.download === 0)
+      return res.json({ success: false, description: 'Download for this album is disabled.' })
+
+    if ((isNaN(versionString) || versionString <= 0) && album.editedAt)
+      return res.redirect(`${album.identifier}?v=${album.editedAt}`)
+
+    if (album.zipGeneratedAt > album.editedAt) {
+      const filePath = path.join(paths.zips, `${identifier}.zip`)
+      const exists = await new Promise(resolve => fs.access(filePath, error => resolve(!error)))
+      if (exists) {
+        const fileName = `${album.name}.zip`
+        return download(filePath, fileName)
+      }
     }
-  }
 
-  if (albumsController.zipEmitters.has(identifier)) {
-    logger.log(`Waiting previous zip task for album: ${identifier}.`)
-    return albumsController.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
-      if (filePath && fileName)
-        download(filePath, fileName)
-      else if (json)
-        res.json(json)
-    })
-  }
+    if (self.zipEmitters.has(identifier)) {
+      logger.log(`Waiting previous zip task for album: ${identifier}.`)
+      return self.zipEmitters.get(identifier).once('done', (filePath, fileName, json) => {
+        if (filePath && fileName)
+          download(filePath, fileName)
+        else if (json)
+          res.json(json)
+      })
+    }
 
-  albumsController.zipEmitters.set(identifier, new ZipEmitter(identifier))
+    self.zipEmitters.set(identifier, new ZipEmitter(identifier))
 
-  logger.log(`Starting zip task for album: ${identifier}.`)
-  const files = await db.table('files')
-    .select('name', 'size')
-    .where('albumid', album.id)
-  if (files.length === 0) {
-    logger.log(`Finished zip task for album: ${identifier} (no files).`)
-    const json = { success: false, description: 'There are no files in the album.' }
-    albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
-    return res.json(json)
-  }
+    logger.log(`Starting zip task for album: ${identifier}.`)
 
-  if (zipMaxTotalSize) {
-    const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
-    if (totalSizeBytes > zipMaxTotalSizeBytes) {
-      logger.log(`Finished zip task for album: ${identifier} (size exceeds).`)
+    const files = await db.table('files')
+      .select('name', 'size')
+      .where('albumid', album.id)
+    if (files.length === 0) {
+      logger.log(`Finished zip task for album: ${identifier} (no files).`)
       const json = {
         success: false,
-        description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize}).`
+        description: 'There are no files in the album.'
       }
-      albumsController.zipEmitters.get(identifier).emit('done', null, null, json)
+      self.zipEmitters.get(identifier).emit('done', null, null, json)
       return res.json(json)
     }
-  }
 
-  const zipPath = path.join(zipsDir, `${album.identifier}.zip`)
-  const archive = new Zip()
+    if (zipMaxTotalSize) {
+      const totalSizeBytes = files.reduce((accumulator, file) => accumulator + parseInt(file.size), 0)
+      if (totalSizeBytes > zipMaxTotalSizeBytes) {
+        logger.log(`Finished zip task for album: ${identifier} (size exceeds).`)
+        const json = {
+          success: false,
+          description: `Total size of all files in the album exceeds the configured limit (${zipMaxTotalSize} MB).`
+        }
+        self.zipEmitters.get(identifier).emit('done', null, null, json)
+        return res.json(json)
+      }
+    }
 
-  let iteration = 0
-  for (const file of files)
-    fs.readFile(path.join(uploadsDir, file.name), (error, data) => {
-      if (error)
-        logger.error(error)
-      else
-        archive.file(file.name, data)
+    const zipPath = path.join(paths.zips, `${album.identifier}.zip`)
+    const archive = new Zip()
 
-      iteration++
-      if (iteration === files.length)
-        archive
-          .generateNodeStream(zipOptions)
+    try {
+      for (const file of files) {
+        const data = await paths.readFile(path.join(paths.uploads, file.name))
+        archive.file(file.name, data)
+      }
+      await new Promise((resolve, reject) => {
+        archive.generateNodeStream(zipOptions)
           .pipe(fs.createWriteStream(zipPath))
-          .on('finish', async () => {
-            logger.log(`Finished zip task for album: ${identifier} (success).`)
-            await db.table('albums')
-              .where('id', album.id)
-              .update('zipGeneratedAt', Math.floor(Date.now() / 1000))
-
-            const filePath = path.join(zipsDir, `${identifier}.zip`)
-            const fileName = `${album.name}.zip`
-
-            albumsController.zipEmitters.get(identifier).emit('done', filePath, fileName)
-            utils.invalidateStatsCache('albums')
-            return download(filePath, fileName)
-          })
-    })
+          .on('error', error => reject(error))
+          .on('finish', () => resolve())
+      })
+    } catch (error) {
+      logger.error(error)
+      return res.status(500).json({
+        success: 'false',
+        description: error.toString()
+      })
+    }
+
+    logger.log(`Finished zip task for album: ${identifier} (success).`)
+
+    await db.table('albums')
+      .where('id', album.id)
+      .update('zipGeneratedAt', Math.floor(Date.now() / 1000))
+    utils.invalidateStatsCache('albums')
+
+    const filePath = path.join(paths.zips, `${identifier}.zip`)
+    const fileName = `${album.name}.zip`
+
+    self.zipEmitters.get(identifier).emit('done', filePath, fileName)
+    return download(filePath, fileName)
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-albumsController.addFiles = async (req, res, next) => {
+self.addFiles = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
   const ids = req.body.ids
-  if (!ids || !ids.length)
+  if (!Array.isArray(ids) || !ids.length)
     return res.json({ success: false, description: 'No files specified.' })
 
-  let albumid = req.body.albumid
-  if (typeof albumid !== 'number') albumid = parseInt(albumid)
-  if (isNaN(albumid) || (albumid < 0)) albumid = null
+  let albumid = parseInt(req.body.albumid)
+  if (isNaN(albumid) || albumid < 0) albumid = null
 
+  let failed = []
   const albumids = []
+  try {
+    if (albumid !== null) {
+      const album = await db.table('albums')
+        .where('id', albumid)
+        .where(function () {
+          if (user.username !== 'root')
+            this.where('userid', user.id)
+        })
+        .first()
 
-  if (albumid !== null) {
-    const album = await db.table('albums')
-      .where('id', albumid)
-      .where(function () {
-        if (user.username !== 'root')
-          this.where('userid', user.id)
-      })
-      .first()
-
-    if (!album)
-      return res.json({ success: false, description: 'Album doesn\'t exist or it doesn\'t belong to the user.' })
+      if (!album)
+        return res.json({
+          success: false,
+          description: 'Album does not exist or it does not belong to the user.'
+        })
 
-    albumids.push(albumid)
-  }
+      albumids.push(albumid)
+    }
 
-  const files = await db.table('files')
-    .whereIn('id', ids)
-    .where(function () {
-      if (user.username !== 'root')
-        this.where('userid', user.id)
-    })
+    const files = await db.table('files')
+      .whereIn('id', ids)
+      .where('userid', user.id)
 
-  const failed = ids.filter(id => !files.find(file => file.id === id))
+    failed = ids.filter(id => !files.find(file => file.id === id))
 
-  const updateDb = await db.table('files')
-    .whereIn('id', files.map(file => file.id))
-    .update('albumid', albumid)
-    .catch(logger.error)
+    await db.table('files')
+      .whereIn('id', files.map(file => file.id))
+      .update('albumid', albumid)
 
-  if (!updateDb)
-    return res.json({
-      success: false,
-      description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
+    files.forEach(file => {
+      if (file.albumid && !albumids.includes(file.albumid))
+        albumids.push(file.albumid)
     })
 
-  files.forEach(file => {
-    if (file.albumid && !albumids.includes(file.albumid))
-      albumids.push(file.albumid)
-  })
-
-  await db.table('albums')
-    .whereIn('id', albumids)
-    .update('editedAt', Math.floor(Date.now() / 1000))
-    .catch(logger.error)
-
-  return res.json({ success: true, failed })
+    await db.table('albums')
+      .whereIn('id', albumids)
+      .update('editedAt', Math.floor(Date.now() / 1000))
+
+    return res.json({ success: true, failed })
+  } catch (error) {
+    logger.error(error)
+    if (failed.length === ids.length)
+      return res.json({
+        success: false,
+        description: `Could not ${albumid === null ? 'add' : 'remove'} any files ${albumid === null ? 'to' : 'from'} the album.`
+      })
+    else
+      return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-module.exports = albumsController
+module.exports = self

+ 157 - 201
controllers/authController.js

@@ -1,3 +1,4 @@
+const { promisify } = require('util')
 const bcrypt = require('bcrypt')
 const config = require('./../config')
 const db = require('knex')(config.database)
@@ -7,160 +8,119 @@ const randomstring = require('randomstring')
 const tokens = require('./tokenController')
 const utils = require('./utilsController')
 
-const authController = {}
-
-authController.verify = async (req, res, next) => {
-  let username = req.body.username
-  let password = req.body.password
+const self = {
+  compare: promisify(bcrypt.compare),
+  hash: promisify(bcrypt.hash)
+}
 
-  if (username === undefined)
+self.verify = async (req, res, next) => {
+  const username = typeof req.body.username === 'string'
+    ? req.body.username.trim()
+    : ''
+  if (!username)
     return res.json({ success: false, description: 'No username provided.' })
-  if (password === undefined)
-    return res.json({ success: false, description: 'No password provided.' })
-
-  username = username.trim()
-  password = password.trim()
 
-  const user = await db.table('users').where('username', username).first()
-  if (!user)
-    return res.json({ success: false, description: 'Username does not exist.' })
-
-  if (user.enabled === false || user.enabled === 0)
-    return res.json({ success: false, description: 'This account has been disabled.' })
+  const password = typeof req.body.password === 'string'
+    ? req.body.password.trim()
+    : ''
+  if (!password)
+    return res.json({ success: false, description: 'No password provided.' })
 
-  bcrypt.compare(password, user.password, (error, result) => {
-    if (error) {
-      logger.error(error)
-      return res.json({ success: false, description: 'There was an error.' })
-    }
-    if (result === false) return res.json({ success: false, description: 'Wrong password.' })
-    return res.json({ success: true, token: user.token })
-  })
+  try {
+    const user = await db.table('users')
+      .where('username', username)
+      .first()
+
+    if (!user)
+      return res.json({ success: false, description: 'Username does not exist.' })
+
+    if (user.enabled === false || user.enabled === 0)
+      return res.json({ success: false, description: 'This account has been disabled.' })
+
+    const result = await self.compare(password, user.password)
+    if (result === false)
+      return res.json({ success: false, description: 'Wrong password.' })
+    else
+      return res.json({ success: true, token: user.token })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.register = async (req, res, next) => {
+self.register = async (req, res, next) => {
   if (config.enableUserAccounts === false)
-    return res.json({ success: false, description: 'Register is disabled at the moment.' })
-
-  let username = req.body.username
-  let password = req.body.password
-
-  if (username === undefined)
-    return res.json({ success: false, description: 'No username provided.' })
-  if (password === undefined)
-    return res.json({ success: false, description: 'No password provided.' })
-
-  username = username.trim()
-  password = password.trim()
+    return res.json({ success: false, description: 'Registration is currently disabled.' })
 
+  const username = typeof req.body.username === 'string'
+    ? req.body.username.trim()
+    : ''
   if (username.length < 4 || username.length > 32)
     return res.json({ success: false, description: 'Username must have 4-32 characters.' })
 
+  const password = typeof req.body.password === 'string'
+    ? req.body.password.trim()
+    : ''
   if (password.length < 6 || password.length > 64)
     return res.json({ success: false, description: 'Password must have 6-64 characters.' })
 
-  const user = await db.table('users').where('username', username).first()
-  if (user)
-    return res.json({ success: false, description: 'Username already exists.' })
+  try {
+    const user = await db.table('users')
+      .where('username', username)
+      .first()
 
-  bcrypt.hash(password, 10, async (error, hash) => {
-    if (error) {
-      logger.error(error)
-      return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
-    }
+    if (user)
+      return res.json({ success: false, description: 'Username already exists.' })
+
+    const hash = await self.hash(password, 10)
 
     const token = await tokens.generateUniqueToken()
     if (!token)
-      return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' })
-
-    await db.table('users').insert({
-      username,
-      password: hash,
-      token,
-      enabled: 1,
-      permission: perms.permissions.user
-    })
+      return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
 
+    await db.table('users')
+      .insert({
+        username,
+        password: hash,
+        token,
+        enabled: 1,
+        permission: perms.permissions.user
+      })
     utils.invalidateStatsCache('users')
+    tokens.onHold.delete(token)
+
     return res.json({ success: true, token })
-  })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.changePassword = async (req, res, next) => {
+self.changePassword = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
-  const password = req.body.password
-  if (password === undefined)
-    return res.json({ success: false, description: 'No password provided.' })
-
+  const password = typeof req.body.password === 'string'
+    ? req.body.password.trim()
+    : ''
   if (password.length < 6 || password.length > 64)
     return res.json({ success: false, description: 'Password must have 6-64 characters.' })
 
-  bcrypt.hash(password, 10, async (error, hash) => {
-    if (error) {
-      logger.error(error)
-      return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
-    }
+  try {
+    const hash = await self.hash(password, 10)
 
     await db.table('users')
       .where('id', user.id)
       .update('password', hash)
 
     return res.json({ success: true })
-  })
-}
-
-authController.getFileLengthConfig = async (req, res, next) => {
-  const user = await utils.authorize(req, res)
-  if (!user) return
-  return res.json({
-    success: true,
-    fileLength: user.fileLength,
-    config: config.uploads.fileLength
-  })
-}
-
-authController.changeFileLength = async (req, res, next) => {
-  if (config.uploads.fileLength.userChangeable === false)
-    return res.json({
-      success: false,
-      description: 'Changing file name length is disabled at the moment.'
-    })
-
-  const user = await utils.authorize(req, res)
-  if (!user) return
-
-  const fileLength = parseInt(req.body.fileLength)
-  if (fileLength === undefined)
-    return res.json({
-      success: false,
-      description: 'No file name length provided.'
-    })
-
-  if (isNaN(fileLength))
-    return res.json({
-      success: false,
-      description: 'File name length is not a valid number.'
-    })
-
-  if (fileLength < config.uploads.fileLength.min || fileLength > config.uploads.fileLength.max)
-    return res.json({
-      success: false,
-      description: `File name length must be ${config.uploads.fileLength.min} to ${config.uploads.fileLength.max} characters.`
-    })
-
-  if (fileLength === user.fileLength)
-    return res.json({ success: true })
-
-  await db.table('users')
-    .where('id', user.id)
-    .update('fileLength', fileLength)
-
-  return res.json({ success: true })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.editUser = async (req, res, next) => {
+self.editUser = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -168,67 +128,61 @@ authController.editUser = async (req, res, next) => {
   if (isNaN(id))
     return res.json({ success: false, description: 'No user specified.' })
 
-  const target = await db.table('users')
-    .where('id', id)
-    .first()
-
-  if (!target)
-    return res.json({ success: false, description: 'Could not get user with the specified ID.' })
-  else if (!perms.higher(user, target))
-    return res.json({ success: false, description: 'The user is in the same or higher group as you.' })
-  else if (target.username === 'root')
-    return res.json({ success: false, description: 'Root user may not be edited.' })
-
-  const update = {}
+  try {
+    const target = await db.table('users')
+      .where('id', id)
+      .first()
 
-  if (req.body.username !== undefined) {
-    update.username = `${req.body.username}`
-    if (update.username.length < 4 || update.username.length > 32)
-      return res.json({ success: false, description: 'Username must have 4-32 characters.' })
-  }
+    if (!target)
+      return res.json({ success: false, description: 'Could not get user with the specified ID.' })
+    else if (!perms.higher(user, target))
+      return res.json({ success: false, description: 'The user is in the same or higher group as you.' })
+    else if (target.username === 'root')
+      return res.json({ success: false, description: 'Root user may not be edited.' })
 
-  if (req.body.enabled !== undefined)
-    update.enabled = Boolean(req.body.enabled)
+    const update = {}
 
-  if (req.body.group !== undefined) {
-    update.permission = perms.permissions[req.body.group] || target.permission
-    if (typeof update.permission !== 'number' || update.permission < 0)
-      update.permission = target.permission
-  }
+    if (req.body.username !== undefined) {
+      update.username = String(req.body.username).trim()
+      if (update.username.length < 4 || update.username.length > 32)
+        return res.json({ success: false, description: 'Username must have 4-32 characters.' })
+    }
 
-  await db.table('users')
-    .where('id', id)
-    .update(update)
-  utils.invalidateStatsCache('users')
+    if (req.body.enabled !== undefined)
+      update.enabled = Boolean(req.body.enabled)
 
-  if (!req.body.resetPassword)
-    return res.json({ success: true, update })
+    if (req.body.group !== undefined) {
+      update.permission = perms.permissions[req.body.group] || target.permission
+      if (typeof update.permission !== 'number' || update.permission < 0)
+        update.permission = target.permission
+    }
 
-  const password = randomstring.generate(16)
-  bcrypt.hash(password, 10, async (error, hash) => {
-    if (error) {
-      logger.error(error)
-      return res.json({ success: false, description: 'Error generating password hash (╯°□°)╯︵ ┻━┻.' })
+    let password
+    if (req.body.resetPassword) {
+      password = randomstring.generate(16)
+      update.password = await self.hash(password, 10)
     }
 
     await db.table('users')
       .where('id', id)
-      .update('password', hash)
+      .update(update)
+    utils.invalidateStatsCache('users')
 
-    return res.json({ success: true, update, password })
-  })
+    const response = { success: true, update }
+    if (password) response.password = password
+    return res.json(response)
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-authController.disableUser = async (req, res, next) => {
-  const body = {
-    id: req.body.id,
-    enabled: false
-  }
-  req.body = body
-  return authController.editUser(req, res, next)
+self.disableUser = async (req, res, next) => {
+  req.body = { id: req.body.id, enabled: false }
+  return self.editUser(req, res, next)
 }
 
-authController.listUsers = async (req, res, next) => {
+self.listUsers = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
@@ -236,53 +190,55 @@ authController.listUsers = async (req, res, next) => {
   if (!isadmin)
     return res.status(403).end()
 
-  const count = await db.table('users')
-    .count('id as count')
-    .then(rows => rows[0].count)
-  if (!count)
-    return res.json({ success: true, users: [], count })
+  try {
+    const count = await db.table('users')
+      .count('id as count')
+      .then(rows => rows[0].count)
+    if (!count)
+      return res.json({ success: true, users: [], count })
 
-  let offset = req.params.page
-  if (offset === undefined) offset = 0
+    let offset = req.params.page
+    if (offset === undefined) offset = 0
 
-  const users = await db.table('users')
-    .limit(25)
-    .offset(25 * offset)
-    .select('id', 'username', 'enabled', 'fileLength', 'permission')
+    const users = await db.table('users')
+      .limit(25)
+      .offset(25 * offset)
+      .select('id', 'username', 'enabled', 'permission')
 
-  const userids = []
+    const userids = []
 
-  for (const user of users) {
-    user.groups = perms.mapPermissions(user)
-    delete user.permission
+    for (const user of users) {
+      user.groups = perms.mapPermissions(user)
+      delete user.permission
 
-    userids.push(user.id)
-    user.uploadsCount = 0
-    user.diskUsage = 0
-  }
+      userids.push(user.id)
+      user.uploadsCount = 0
+      user.diskUsage = 0
+    }
 
-  const maps = {}
-  const uploads = await db.table('files').whereIn('userid', userids)
+    const maps = {}
+    const uploads = await db.table('files')
+      .whereIn('userid', userids)
 
-  for (const upload of uploads) {
-    // This is the fastest method that I can think of
-    if (maps[upload.userid] === undefined)
-      maps[upload.userid] = {
-        count: 0,
-        size: 0
-      }
+    for (const upload of uploads) {
+      if (maps[upload.userid] === undefined)
+        maps[upload.userid] = { count: 0, size: 0 }
 
-    maps[upload.userid].count++
-    maps[upload.userid].size += parseInt(upload.size)
-  }
+      maps[upload.userid].count++
+      maps[upload.userid].size += parseInt(upload.size)
+    }
 
-  for (const user of users) {
-    if (!maps[user.id]) continue
-    user.uploadsCount = maps[user.id].count
-    user.diskUsage = maps[user.id].size
-  }
+    for (const user of users) {
+      if (!maps[user.id]) continue
+      user.uploadsCount = maps[user.id].count
+      user.diskUsage = maps[user.id].size
+    }
 
-  return res.json({ success: true, users, count })
+    return res.json({ success: true, users, count })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-module.exports = authController
+module.exports = self

+ 79 - 0
controllers/pathsController.js

@@ -0,0 +1,79 @@
+const { promisify } = require('util')
+const config = require('./../config')
+const fs = require('fs')
+const logger = require('./../logger')
+const path = require('path')
+
+const self = {}
+
+// Promisify these fs functions
+const fsFuncs = [
+  'access',
+  'lstat',
+  'mkdir',
+  'readdir',
+  'readFile',
+  'rename',
+  'rmdir',
+  'symlink',
+  'unlink'
+]
+
+for (const fsFunc of fsFuncs)
+  self[fsFunc] = promisify(fs[fsFunc])
+
+self.uploads = path.resolve(config.uploads.folder)
+self.chunks = path.join(self.uploads, 'chunks')
+self.thumbs = path.join(self.uploads, 'thumbs')
+self.zips = path.join(self.uploads, 'zips')
+
+self.thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png')
+
+self.logs = path.resolve(config.logsFolder)
+
+self.customPages = path.resolve('pages/custom')
+self.public = path.resolve('public')
+
+self.errorRoot = path.resolve(config.errorPages.rootDir)
+
+const verify = [
+  self.uploads,
+  self.chunks,
+  self.thumbs,
+  self.zips,
+  self.logs,
+  self.customPages
+]
+
+self.init = async () => {
+  try {
+    for (const p of verify)
+      try {
+        await self.access(p)
+      } catch (err) {
+        if (err.code !== 'ENOENT') {
+          logger.error(err)
+        } else {
+          const mkdir = await self.mkdir(p)
+          if (mkdir)
+            logger.log(`Created directory: ${p}`)
+        }
+      }
+
+    // Purge chunks directory
+    const uuidDirs = await self.readdir(self.chunks)
+    for (const uuid of uuidDirs) {
+      const root = path.join(self.chunks, uuid)
+      const chunks = await self.readdir(root)
+      for (const chunk of chunks)
+        await self.unlink(path.join(root, chunk))
+      await self.rmdir(root)
+    }
+
+    self.verified = true
+  } catch (error) {
+    logger.error(error)
+  }
+}
+
+module.exports = self

+ 17 - 15
controllers/permissionController.js

@@ -1,32 +1,34 @@
-const permissionController = {}
+const self = {}
 
-permissionController.permissions = {
-  user: 0, // upload & delete own files, create & delete albums
-  moderator: 50, // delete other user's files
-  admin: 80, // manage users (disable accounts) & create moderators
-  superadmin: 100 // create admins
-  // groups will inherit permissions from groups which have lower value
+self.permissions = {
+  user: 0, // Upload & delete own files, create & delete albums
+  moderator: 50, // Delete other user's files
+  admin: 80, // Manage users (disable accounts) & create moderators
+  superadmin: 100 // Create admins
+  // Groups will inherit permissions from groups which have lower value
 }
 
-permissionController.is = (user, group) => {
+self.is = (user, group) => {
   // root bypass
-  if (user.username === 'root') return true
+  if (user.username === 'root')
+    return true
+
   const permission = user.permission || 0
-  return permission >= permissionController.permissions[group]
+  return permission >= self.permissions[group]
 }
 
-permissionController.higher = (user, target) => {
+self.higher = (user, target) => {
   const userPermission = user.permission || 0
   const targetPermission = target.permission || 0
   return userPermission > targetPermission
 }
 
-permissionController.mapPermissions = user => {
+self.mapPermissions = user => {
   const map = {}
-  Object.keys(permissionController.permissions).forEach(group => {
-    map[group] = permissionController.is(user, group)
+  Object.keys(self.permissions).forEach(group => {
+    map[group] = self.is(user, group)
   })
   return map
 }
 
-module.exports = permissionController
+module.exports = self

+ 75 - 50
controllers/tokenController.js

@@ -1,74 +1,99 @@
 const config = require('./../config')
 const db = require('knex')(config.database)
+const logger = require('./../logger')
 const perms = require('./permissionController')
 const randomstring = require('randomstring')
 const utils = require('./utilsController')
 
-const TOKEN_LENGTH = 64
-const UNIQUE_TOKEN_MAX_TRIES = 3
+const self = {
+  tokenLength: 64,
+  tokenMaxTries: 3,
+  onHold: new Set()
+}
+
+self.generateUniqueToken = async () => {
+  for (let i = 0; i < self.tokenMaxTries; i++) {
+    const token = randomstring.generate(self.tokenLength)
+    if (self.onHold.has(token))
+      continue
 
-const tokenController = {}
+    // Put token on-hold (wait for it to be inserted to DB)
+    self.onHold.add(token)
 
-tokenController.generateUniqueToken = () => {
-  return new Promise(resolve => {
-    const query = async i => {
-      const token = randomstring.generate(TOKEN_LENGTH)
-      const user = await db.table('users').where('token', token).first().catch(() => undefined)
-      if (user === undefined) return resolve(token)
-      if (++i < UNIQUE_TOKEN_MAX_TRIES) return query(i)
-      resolve(null)
+    const user = await db.table('users')
+      .where('token', token)
+      .select('id')
+      .first()
+    if (user) {
+      self.onHold.delete(token)
+      continue
     }
-    query(0)
-  })
+
+    return token
+  }
+
+  return null
 }
 
-tokenController.verify = async (req, res, next) => {
-  const token = req.body.token
-  if (token === undefined)
-    return res.status(401).json({
-      success: false,
-      description: 'No token provided.'
-    })
+self.verify = async (req, res, next) => {
+  const token = typeof req.body.token === 'string'
+    ? req.body.token.trim()
+    : ''
 
-  const user = await db.table('users').where('token', token).first()
-  if (!user)
-    return res.status(401).json({
-      success: false,
-      description: 'Invalid token.'
-    })
+  if (!token)
+    return res.status(401).json({ success: false, description: 'No token provided.' })
 
-  return res.json({
-    success: true,
-    username: user.username,
-    permissions: perms.mapPermissions(user)
-  })
+  try {
+    const user = await db.table('users')
+      .where('token', token)
+      .select('username', 'permission')
+      .first()
+
+    if (!user)
+      return res.status(401).json({ success: false, description: 'Invalid token.' })
+
+    return res.json({
+      success: true,
+      username: user.username,
+      permissions: perms.mapPermissions(user)
+    })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-tokenController.list = async (req, res, next) => {
+self.list = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
-  return res.json({
-    success: true,
-    token: user.token
-  })
+  return res.json({ success: true, token: user.token })
 }
 
-tokenController.change = async (req, res, next) => {
+self.change = async (req, res, next) => {
   const user = await utils.authorize(req, res)
   if (!user) return
 
-  const newtoken = await tokenController.generateUniqueToken()
-  if (!newtoken)
-    return res.json({ success: false, description: 'Error generating unique token (╯°□°)╯︵ ┻━┻.' })
-
-  await db.table('users').where('token', user.token).update({
-    token: newtoken,
-    timestamp: Math.floor(Date.now() / 1000)
-  })
-  return res.json({
-    success: true,
-    token: newtoken
-  })
+  const newToken = await self.generateUniqueToken()
+  if (!newToken)
+    return res.json({ success: false, description: 'Sorry, we could not allocate a unique token. Try again?' })
+
+  try {
+    await db.table('users')
+      .where('token', user.token)
+      .update({
+        token: newToken,
+        timestamp: Math.floor(Date.now() / 1000)
+      })
+    self.onHold.delete(newToken)
+
+    return res.json({
+      success: true,
+      token: newToken
+    })
+  } catch (error) {
+    logger.error(error)
+    return res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-module.exports = tokenController
+module.exports = self

File diff suppressed because it is too large
+ 511 - 496
controllers/uploadController.js


+ 360 - 361
controllers/utilsController.js

@@ -1,21 +1,37 @@
-const { spawn } = require('child_process')
+const { promisify } = require('util')
 const config = require('./../config')
 const db = require('knex')(config.database)
 const fetch = require('node-fetch')
 const ffmpeg = require('fluent-ffmpeg')
 const fs = require('fs')
 const logger = require('./../logger')
-const os = require('os')
 const path = require('path')
+const paths = require('./pathsController')
 const perms = require('./permissionController')
 const sharp = require('sharp')
+const si = require('systeminformation')
 
-const utilsController = {}
-const _stats = {
+const self = {
+  clamd: {
+    scanner: null,
+    timeout: config.uploads.scan.timeout || 5000,
+    chunkSize: config.uploads.scan.chunkSize || 64 * 1024
+  },
+  gitHash: null,
+  idSet: null,
+
+  idMaxTries: config.uploads.maxTries || 1,
+
+  imageExts: ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg'],
+  videoExts: ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv'],
+
+  ffprobe: promisify(ffmpeg.ffprobe)
+}
+
+const statsCache = {
   system: {
     cache: null,
-    generating: false,
-    generatedAt: 0
+    generating: false
   },
   albums: {
     cache: null,
@@ -37,23 +53,17 @@ const _stats = {
   }
 }
 
-const uploadsDir = path.resolve(config.uploads.folder)
-const thumbsDir = path.join(uploadsDir, 'thumbs')
-const thumbPlaceholder = path.resolve(config.uploads.generateThumbs.placeholder || 'public/images/unavailable.png')
 const cloudflareAuth = config.cloudflare.apiKey && config.cloudflare.email && config.cloudflare.zoneId
 
-utilsController.imageExtensions = ['.webp', '.jpg', '.jpeg', '.gif', '.png', '.tiff', '.tif', '.svg']
-utilsController.videoExtensions = ['.webm', '.mp4', '.wmv', '.avi', '.mov', '.mkv']
-
-utilsController.mayGenerateThumb = extname => {
-  return (config.uploads.generateThumbs.image && utilsController.imageExtensions.includes(extname)) ||
-    (config.uploads.generateThumbs.video && utilsController.videoExtensions.includes(extname))
+self.mayGenerateThumb = extname => {
+  return (config.uploads.generateThumbs.image && self.imageExts.includes(extname)) ||
+    (config.uploads.generateThumbs.video && self.videoExts.includes(extname))
 }
 
-// expand if necessary (must be lower case); for now only preserves some known tarballs
-utilsController.preserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz']
+// Expand if necessary (must be lower case); for now only preserves some known tarballs
+const extPreserves = ['.tar.gz', '.tar.z', '.tar.bz2', '.tar.lzma', '.tar.lzo', '.tar.xz']
 
-utilsController.extname = filename => {
+self.extname = filename => {
   // Always return blank string if the filename does not seem to have a valid extension
   // Files such as .DS_Store (anything that starts with a dot, without any extension after) will still be accepted
   if (!/\../.test(filename)) return ''
@@ -69,9 +79,9 @@ utilsController.extname = filename => {
   }
 
   // check against extensions that must be preserved
-  for (let i = 0; i < utilsController.preserves.length; i++)
-    if (lower.endsWith(utilsController.preserves[i])) {
-      extname = utilsController.preserves[i]
+  for (const extPreserve of extPreserves)
+    if (lower.endsWith(extPreserve)) {
+      extname = extPreserve
       break
     }
 
@@ -81,18 +91,20 @@ utilsController.extname = filename => {
   return extname + multi
 }
 
-utilsController.escape = string => {
+self.escape = (string) => {
   // MIT License
   // Copyright(c) 2012-2013 TJ Holowaychuk
   // Copyright(c) 2015 Andreas Lubbe
   // Copyright(c) 2015 Tiancheng "Timothy" Gu
 
-  if (!string) return string
+  if (!string)
+    return string
 
-  const str = '' + string
+  const str = String(string)
   const match = /["'&<>]/.exec(str)
 
-  if (!match) return str
+  if (!match)
+    return str
 
   let escape
   let html = ''
@@ -132,258 +144,266 @@ utilsController.escape = string => {
     : html
 }
 
-utilsController.authorize = async (req, res) => {
+self.authorize = async (req, res) => {
+  // TODO: Improve usage of this function by the other APIs
   const token = req.headers.token
   if (token === undefined) {
     res.status(401).json({ success: false, description: 'No token provided.' })
     return
   }
 
-  const user = await db.table('users').where('token', token).first()
-  if (user) {
-    if (user.enabled === false || user.enabled === 0) {
-      res.json({ success: false, description: 'This account has been disabled.' })
-      return
+  try {
+    const user = await db.table('users')
+      .where('token', token)
+      .first()
+    if (user) {
+      if (user.enabled === false || user.enabled === 0) {
+        res.json({ success: false, description: 'This account has been disabled.' })
+        return
+      }
+      return user
     }
-    return user
-  }
 
-  res.status(401).json({
-    success: false,
-    description: 'Invalid token.'
-  })
+    res.status(401).json({ success: false, description: 'Invalid token.' })
+  } catch (error) {
+    logger.error(error)
+    res.status(500).json({ success: false, description: 'An unexpected error occurred. Try again?' })
+  }
 }
 
-utilsController.generateThumbs = (name, force) => {
-  return new Promise(resolve => {
-    const extname = utilsController.extname(name)
-    const thumbname = path.join(thumbsDir, name.slice(0, -extname.length) + '.png')
-    fs.lstat(thumbname, async (error, stats) => {
-      if (error && error.code !== 'ENOENT') {
-        logger.error(error)
-        return resolve(false)
-      }
+self.generateThumbs = async (name, extname, force) => {
+  const thumbname = path.join(paths.thumbs, name.slice(0, -extname.length) + '.png')
+
+  try {
+    // Check if thumbnail already exists
+    try {
+      const lstat = await paths.lstat(thumbname)
+      if (lstat.isSymbolicLink())
+        // Unlink if symlink (should be symlink to the placeholder)
+        await paths.unlink(thumbname)
+      else if (!force)
+        // Continue only if it does not exist, unless forced to
+        return true
+    } catch (error) {
+      // Re-throw error
+      if (error.code !== 'ENOENT')
+        throw error
+    }
 
-      if (!error && stats.isSymbolicLink()) {
-        // Unlink symlink
-        const unlink = await new Promise(resolve => {
-          fs.unlink(thumbname, error => {
-            if (error) logger.error(error)
-            resolve(!error)
+    // Full path to input file
+    const input = path.join(paths.uploads, name)
+
+    // If image extension
+    if (self.imageExts.includes(extname)) {
+      const resizeOptions = {
+        width: 200,
+        height: 200,
+        fit: 'contain',
+        background: {
+          r: 0,
+          g: 0,
+          b: 0,
+          alpha: 0
+        }
+      }
+      const image = sharp(input)
+      const metadata = await image.metadata()
+      if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
+        await image
+          .resize(resizeOptions)
+          .toFile(thumbname)
+      } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
+        await image
+          .toFile(thumbname)
+      } else {
+        const x = resizeOptions.width - metadata.width
+        const y = resizeOptions.height - metadata.height
+        await image
+          .extend({
+            top: Math.floor(y / 2),
+            bottom: Math.ceil(y / 2),
+            left: Math.floor(x / 2),
+            right: Math.ceil(x / 2),
+            background: resizeOptions.background
           })
-        })
-        if (!unlink) return resolve(false)
+          .toFile(thumbname)
       }
+    } else if (self.videoExts.includes(extname)) {
+      const metadata = await self.ffprobe(input)
 
-      // Only make thumbnail if it does not exist (ENOENT)
-      if (!error && !force) return resolve(true)
-
-      // Full path to input file
-      const input = path.join(__dirname, '..', config.uploads.folder, name)
-
-      new Promise((resolve, reject) => {
-        // If image extension
-        if (utilsController.imageExtensions.includes(extname)) {
-          const resizeOptions = {
-            width: 200,
-            height: 200,
-            fit: 'contain',
-            background: {
-              r: 0,
-              g: 0,
-              b: 0,
-              alpha: 0
-            }
-          }
-          const image = sharp(input)
-          return image
-            .metadata()
-            .then(metadata => {
-              if (metadata.width > resizeOptions.width || metadata.height > resizeOptions.height) {
-                return image
-                  .resize(resizeOptions)
-                  .toFile(thumbname)
-              } else if (metadata.width === resizeOptions.width && metadata.height === resizeOptions.height) {
-                return image
-                  .toFile(thumbname)
-              } else {
-                const x = resizeOptions.width - metadata.width
-                const y = resizeOptions.height - metadata.height
-                return image
-                  .extend({
-                    top: Math.floor(y / 2),
-                    bottom: Math.ceil(y / 2),
-                    left: Math.floor(x / 2),
-                    right: Math.ceil(x / 2),
-                    background: resizeOptions.background
-                  })
-                  .toFile(thumbname)
-              }
-            })
-            .then(() => resolve(true))
-            .catch(reject)
-        }
-
-        // Otherwise video extension
-        ffmpeg.ffprobe(input, (error, metadata) => {
-          if (error) return reject(error)
-
-          // Skip files that do not have video streams/channels
-          if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
-            // eslint-disable-next-line prefer-promise-reject-errors
-            return reject('File does not contain any video stream')
+      // Skip files that do not have video streams/channels
+      if (!metadata.streams || !metadata.streams.some(s => s.codec_type === 'video'))
+        throw 'File does not contain any video stream'
 
-          ffmpeg(input)
-            .inputOptions([
+      await new Promise((resolve, reject) => {
+        ffmpeg(input)
+          .inputOptions([
               `-ss ${parseInt(metadata.format.duration) * 20 / 100}`
-            ])
-            .output(thumbname)
-            .outputOptions([
-              '-vframes 1',
-              '-vf scale=200:200:force_original_aspect_ratio=decrease'
-            ])
-            .on('error', error => {
-              // Attempt to unlink thumbnail
-              // Since ffmpeg may have already created an incomplete thumbnail
-              fs.unlink(thumbname, err => {
-                if (err && err.code !== 'ENOENT')
-                  logger.error(`[${name}]: ${err.toString()}`)
-                reject(error)
-              })
-            })
-            .on('end', () => resolve(true))
-            .run()
-        })
-      })
-        .then(resolve)
-        .catch(error => {
-          // Suppress error logging for errors these patterns
-          const errorString = error.toString()
-          const suppress = [
-            /Input file contains unsupported image format/,
-            /Invalid data found when processing input/,
-            /File does not contain any video stream/
-          ]
-          if (!suppress.some(t => t.test(errorString)))
-            logger.error(`[${name}]: ${errorString}`)
-
-          fs.symlink(thumbPlaceholder, thumbname, err => {
-            if (err) logger.error(err)
-            // We return true anyway
-            // if we could make a symlink to the placeholder image
-            resolve(!err)
+          ])
+          .output(thumbname)
+          .outputOptions([
+            '-vframes 1',
+            '-vf scale=200:200:force_original_aspect_ratio=decrease'
+          ])
+          .on('error', async error => {
+            // Try to unlink thumbnail,
+            // since ffmpeg may have created an incomplete thumbnail
+            try {
+              await paths.unlink(thumbname)
+            } catch (err) {
+              if (err && err.code !== 'ENOENT')
+                logger.error(`[${name}]: ${err.toString()}`)
+            }
+            return reject(error)
           })
-        })
-    })
-  })
+          .on('end', () => resolve(true))
+          .run()
+      })
+    } else {
+      return false
+    }
+  } catch (error) {
+    // Suppress error logging for errors matching these patterns
+    const errorString = error.toString()
+    const suppress = [
+      /Input file contains unsupported image format/,
+      /Invalid data found when processing input/,
+      /File does not contain any video stream/
+    ]
+
+    if (!suppress.some(t => t.test(errorString)))
+      logger.error(`[${name}]: ${errorString}`)
+
+    try {
+      await paths.symlink(paths.thumbPlaceholder, thumbname)
+      return true
+    } catch (err) {
+      logger.error(err)
+      return false
+    }
+  }
+
+  return true
 }
 
-utilsController.deleteFile = (filename, set) => {
-  return new Promise((resolve, reject) => {
-    const extname = utilsController.extname(filename)
-    return fs.unlink(path.join(uploadsDir, filename), error => {
-      if (error && error.code !== 'ENOENT') return reject(error)
-      const identifier = filename.split('.')[0]
-      // eslint-disable-next-line curly
-      if (set) {
-        set.delete(identifier)
-        // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
-      }
-      if (utilsController.imageExtensions.includes(extname) || utilsController.videoExtensions.includes(extname)) {
-        const thumb = `${identifier}.png`
-        return fs.unlink(path.join(thumbsDir, thumb), error => {
-          if (error && error.code !== 'ENOENT') return reject(error)
-          resolve(true)
-        })
-      }
-      resolve(true)
-    })
-  })
+self.unlinkFile = async (filename, predb) => {
+  try {
+    await paths.unlink(path.join(paths.uploads, filename))
+  } catch (error) {
+    // Ignore if the file does not exist (already deleted)
+    if (error.code !== 'ENOENT')
+      throw error
+  }
+
+  const identifier = filename.split('.')[0]
+
+  // Do not remove from identifiers cache on pre-db-deletion
+  // eslint-disable-next-line curly
+  if (!predb && self.idSet) {
+    self.idSet.delete(identifier)
+    // logger.log(`Removed ${identifier} from identifiers cache (deleteFile)`)
+  }
+
+  const extname = self.extname(filename)
+  if (self.imageExts.includes(extname) || self.videoExts.includes(extname))
+    try {
+      await paths.unlink(path.join(paths.thumbs, `${identifier}.png`))
+    } catch (error) {
+      if (error.code !== 'ENOENT')
+        throw error
+    }
 }
 
-utilsController.bulkDeleteFiles = async (field, values, user, set) => {
+self.bulkDeleteFromDb = async (field, values, user) => {
   if (!user || !['id', 'name'].includes(field)) return
 
   // SQLITE_LIMIT_VARIABLE_NUMBER, which defaults to 999
   // Read more: https://www.sqlite.org/limits.html
   const MAX_VARIABLES_CHUNK_SIZE = 999
   const chunks = []
-  const _values = values.slice() // Make a shallow copy of the array
-  while (_values.length)
-    chunks.push(_values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
+  while (values.length)
+    chunks.push(values.splice(0, MAX_VARIABLES_CHUNK_SIZE))
 
-  const failed = []
+  let failed = []
   const ismoderator = perms.is(user, 'moderator')
-  await Promise.all(chunks.map((chunk, index) => {
-    const job = async () => {
-      try {
-        const files = await db.table('files')
-          .whereIn(field, chunk)
-          .where(function () {
-            if (!ismoderator)
-              this.where('userid', user.id)
-          })
 
-        // Push files that could not be found in DB
-        failed.push.apply(failed, chunk.filter(v => !files.find(file => file[field] === v)))
-
-        // Delete all found files physically
-        const deletedFiles = []
-        await Promise.all(files.map(file =>
-          utilsController.deleteFile(file.name)
-            .then(() => deletedFiles.push(file))
-            .catch(error => {
-              failed.push(file[field])
-              logger.error(error)
-            })
-        ))
-
-        if (!deletedFiles.length)
-          return true
-
-        // Delete all found files from database
-        const deletedFromDb = await db.table('files')
-          .whereIn('id', deletedFiles.map(file => file.id))
-          .del()
-
-        if (set)
-          deletedFiles.forEach(file => {
-            const identifier = file.name.split('.')[0]
-            set.delete(identifier)
-            // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFiles)`)
-          })
+  try {
+    let unlinkeds = []
+    const albumids = []
 
-        // Update albums if necessary
-        if (deletedFromDb) {
-          const albumids = []
-          deletedFiles.forEach(file => {
-            if (file.albumid && !albumids.includes(file.albumid))
-              albumids.push(file.albumid)
-          })
-          await db.table('albums')
-            .whereIn('id', albumids)
-            .update('editedAt', Math.floor(Date.now() / 1000))
-            .catch(logger.error)
+    for (let i = 0; i < chunks.length; i++) {
+      const files = await db.table('files')
+        .whereIn(field, chunks[i])
+        .where(function () {
+          if (!ismoderator)
+            this.where('userid', user.id)
+        })
+
+      // Push files that could not be found in db
+      failed = failed.concat(chunks[i].filter(value => !files.find(file => file[field] === value)))
+
+      // Unlink all found files
+      const unlinked = []
+      for (const file of files)
+        try {
+          await self.unlinkFile(file.name, true)
+          unlinked.push(file)
+        } catch (error) {
+          logger.error(error)
+          failed.push(file[field])
         }
 
-        // Purge Cloudflare's cache if necessary
-        if (config.cloudflare.purgeCache)
-          utilsController.purgeCloudflareCache(deletedFiles.map(file => file.name), true, true)
-            .then(results => {
-              for (const result of results)
-                if (result.errors.length)
-                  result.errors.forEach(error => logger.error(`[CF]: ${error}`))
-            })
-      } catch (error) {
-        logger.error(error)
-      }
+      if (!unlinked.length)
+        continue
+
+      // Delete all unlinked files from db
+      await db.table('files')
+        .whereIn('id', unlinked.map(file => file.id))
+        .del()
+      self.invalidateStatsCache('uploads')
+
+      if (self.idSet)
+        unlinked.forEach(file => {
+          const identifier = file.name.split('.')[0]
+          self.idSet.delete(identifier)
+          // logger.log(`Removed ${identifier} from identifiers cache (bulkDeleteFromDb)`)
+        })
+
+      // Push album ids
+      unlinked.forEach(file => {
+        if (file.albumid && !albumids.includes(file.albumid))
+          albumids.push(file.albumid)
+      })
+
+      // Push unlinked files
+      unlinkeds = unlinkeds.concat(unlinked)
     }
-    return new Promise(resolve => job().then(() => resolve()))
-  }))
+
+    if (unlinkeds.length) {
+      // Update albums if necessary, but do not wait
+      if (albumids.length)
+        db.table('albums')
+          .whereIn('id', albumids)
+          .update('editedAt', Math.floor(Date.now() / 1000))
+          .catch(logger.error)
+
+      // Purge Cloudflare's cache if necessary, but do not wait
+      if (config.cloudflare.purgeCache)
+        self.purgeCloudflareCache(unlinkeds.map(file => file.name), true, true)
+          .then(results => {
+            for (const result of results)
+              if (result.errors.length)
+                result.errors.forEach(error => logger.error(`[CF]: ${error}`))
+          })
+    }
+  } catch (error) {
+    logger.error(error)
+  }
+
   return failed
 }
 
-utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => {
+self.purgeCloudflareCache = async (names, uploads, thumbs) => {
   if (!Array.isArray(names) || !names.length || !cloudflareAuth)
     return [{
       success: false,
@@ -398,8 +418,8 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => {
   names = names.map(name => {
     if (uploads) {
       const url = `${domain}/${name}`
-      const extname = utilsController.extname(name)
-      if (thumbs && utilsController.mayGenerateThumb(extname))
+      const extname = self.extname(name)
+      if (thumbs && self.mayGenerateThumb(extname))
         thumbNames.push(`${domain}/thumbs/${name.slice(0, -extname.length)}.png`)
       return url
     } else {
@@ -411,87 +431,70 @@ utilsController.purgeCloudflareCache = async (names, uploads, thumbs) => {
   // Split array into multiple arrays with max length of 30 URLs
   // https://api.cloudflare.com/#zone-purge-files-by-url
   const MAX_LENGTH = 30
-  const files = []
+  const chunks = []
   while (names.length)
-    files.push(names.splice(0, MAX_LENGTH))
+    chunks.push(names.splice(0, MAX_LENGTH))
 
   const url = `https://api.cloudflare.com/client/v4/zones/${config.cloudflare.zoneId}/purge_cache`
   const results = []
-  await new Promise(resolve => {
-    const purge = async i => {
-      const result = {
-        success: false,
-        files: files[i],
-        errors: []
-      }
 
-      try {
-        const fetchPurge = await fetch(url, {
-          method: 'POST',
-          body: JSON.stringify({
-            files: result.files
-          }),
-          headers: {
-            'Content-Type': 'application/json',
-            'X-Auth-Email': config.cloudflare.email,
-            'X-Auth-Key': config.cloudflare.apiKey
-          }
-        }).then(res => res.json())
-        result.success = fetchPurge.success
-        if (Array.isArray(fetchPurge.errors) && fetchPurge.errors.length)
-          result.errors = fetchPurge.errors.map(error => `${error.code}: ${error.message}`)
-      } catch (error) {
-        result.errors = [error.toString()]
-      }
-
-      results.push(result)
+  for (const chunk of chunks) {
+    const result = {
+      success: false,
+      files: chunk,
+      errors: []
+    }
 
-      if (i < files.length - 1)
-        purge(i + 1)
-      else
-        resolve()
+    try {
+      const purge = await fetch(url, {
+        method: 'POST',
+        body: JSON.stringify({ files: chunk }),
+        headers: {
+          'Content-Type': 'application/json',
+          'X-Auth-Email': config.cloudflare.email,
+          'X-Auth-Key': config.cloudflare.apiKey
+        }
+      })
+      const response = await purge.json()
+      result.success = response.success
+      if (Array.isArray(response.errors) && response.errors.length)
+        result.errors = response.errors.map(error => `${error.code}: ${error.message}`)
+    } catch (error) {
+      result.errors = [error.toString()]
     }
-    purge(0)
-  })
+
+    results.push(result)
+  }
 
   return results
 }
 
-utilsController.getMemoryUsage = () => {
-  // For now this is linux-only. Not sure if darwin has this too.
-  return new Promise((resolve, reject) => {
-    const prc = spawn('free', ['-b'])
-    prc.stdout.setEncoding('utf8')
-    prc.stdout.on('data', data => {
-      const parsed = {}
-      const str = data.toString()
-      const lines = str.split(/\n/g)
-      for (let i = 0; i < lines.length; i++) {
-        lines[i] = lines[i].split(/\s+/)
-        if (i === 0) continue
-        const id = lines[i][0].toLowerCase().slice(0, -1)
-        if (!id) continue
-        if (!parsed[id]) parsed[id] = {}
-        for (let j = 1; j < lines[i].length; j++) {
-          const bytes = parseInt(lines[i][j])
-          parsed[id][lines[0][j]] = isNaN(bytes) ? null : bytes
-        }
-      }
-      resolve(parsed)
-    })
-    prc.on('close', code => {
-      reject(new Error(`Process exited with code ${code}.`))
-    })
-  })
+self.bulkDeleteExpired = async (dryrun) => {
+  const timestamp = Date.now() / 1000
+  const field = 'id'
+  const sudo = { username: 'root' }
+
+  const result = {}
+  result.expired = await db.table('files')
+    .where('expirydate', '<=', timestamp)
+    .select(field)
+    .then(rows => rows.map(row => row[field]))
+
+  if (!dryrun) {
+    const values = result.expired.slice() // Make a shallow copy
+    result.failed = await self.bulkDeleteFromDb(field, values, sudo)
+  }
+
+  return result
 }
 
-utilsController.invalidateStatsCache = type => {
+self.invalidateStatsCache = type => {
   if (!['albums', 'users', 'uploads'].includes(type)) return
-  _stats[type].invalidatedAt = Date.now()
+  statsCache[type].invalidatedAt = Date.now()
 }
 
-utilsController.stats = async (req, res, next) => {
-  const user = await utilsController.authorize(req, res)
+self.stats = async (req, res, next) => {
+  const user = await self.authorize(req, res)
   if (!user) return
 
   const isadmin = perms.is(user, 'admin')
@@ -499,48 +502,44 @@ utilsController.stats = async (req, res, next) => {
 
   const stats = {}
 
-  if (!_stats.system.cache && _stats.system.generating) {
+  // Re-use caches as long as they are still valid
+
+  if (!statsCache.system.cache && statsCache.system.generating) {
     stats.system = false
-  } else if ((Date.now() - _stats.system.generatedAt <= 1000) || _stats.system.generating) {
-    // Re-use system cache for only 1000ms
-    stats.system = _stats.system.cache
+  } else if (statsCache.system.generating) {
+    stats.system = statsCache.system.cache
   } else {
-    _stats.system.generating = true
-    const platform = os.platform()
-    stats.system = {
-      platform: `${platform}-${os.arch()}`,
-      systemMemory: null,
-      nodeVersion: `${process.versions.node}`,
-      memoryUsage: process.memoryUsage().rss
-    }
+    statsCache.system.generating = true
 
-    if (platform === 'linux') {
-      const memoryUsage = await utilsController.getMemoryUsage()
-      stats.system.systemMemory = {
-        used: memoryUsage.mem.used,
-        total: memoryUsage.mem.total
-      }
-    } else {
-      delete stats.system.systemMemory
-    }
+    const os = await si.osInfo()
+    const currentLoad = await si.currentLoad()
+    const mem = await si.mem()
 
-    if (platform !== 'win32')
-      stats.system.loadAverage = `${os.loadavg().map(load => load.toFixed(2)).join(', ')}`
+    stats.system = {
+      platform: `${os.platform} ${os.arch}`,
+      distro: `${os.distro} ${os.release}`,
+      kernel: os.kernel,
+      cpuLoad: `${currentLoad.currentload.toFixed(1)}%`,
+      cpusLoad: currentLoad.cpus.map(cpu => `${cpu.load.toFixed(1)}%`).join(', '),
+      systemMemory: {
+        used: mem.active,
+        total: mem.total
+      },
+      memoryUsage: process.memoryUsage().rss,
+      nodeVersion: `${process.versions.node}`
+    }
 
     // Update cache
-    _stats.system.cache = stats.system
-    _stats.system.generatedAt = Date.now()
-    _stats.system.generating = false
+    statsCache.system.cache = stats.system
+    statsCache.system.generating = false
   }
 
-  // Re-use albums, users, and uploads caches as long as they are still valid
-
-  if (!_stats.albums.cache && _stats.albums.generating) {
+  if (!statsCache.albums.cache && statsCache.albums.generating) {
     stats.albums = false
-  } else if ((_stats.albums.invalidatedAt < _stats.albums.generatedAt) || _stats.albums.generating) {
-    stats.albums = _stats.albums.cache
+  } else if ((statsCache.albums.invalidatedAt < statsCache.albums.generatedAt) || statsCache.albums.generating) {
+    stats.albums = statsCache.albums.cache
   } else {
-    _stats.albums.generating = true
+    statsCache.albums.generating = true
     stats.albums = {
       total: 0,
       active: 0,
@@ -560,7 +559,7 @@ utilsController.stats = async (req, res, next) => {
         if (album.zipGeneratedAt) identifiers.push(album.identifier)
       }
 
-    const zipsDir = path.join(uploadsDir, 'zips')
+    const zipsDir = path.join(paths.uploads, 'zips')
     await Promise.all(identifiers.map(identifier => {
       return new Promise(resolve => {
         const filePath = path.join(zipsDir, `${identifier}.zip`)
@@ -572,17 +571,17 @@ utilsController.stats = async (req, res, next) => {
     }))
 
     // Update cache
-    _stats.albums.cache = stats.albums
-    _stats.albums.generatedAt = Date.now()
-    _stats.albums.generating = false
+    statsCache.albums.cache = stats.albums
+    statsCache.albums.generatedAt = Date.now()
+    statsCache.albums.generating = false
   }
 
-  if (!_stats.users.cache && _stats.users.generating) {
+  if (!statsCache.users.cache && statsCache.users.generating) {
     stats.users = false
-  } else if ((_stats.users.invalidatedAt < _stats.users.generatedAt) || _stats.users.generating) {
-    stats.users = _stats.users.cache
+  } else if ((statsCache.users.invalidatedAt < statsCache.users.generatedAt) || statsCache.users.generating) {
+    stats.users = statsCache.users.cache
   } else {
-    _stats.users.generating = true
+    statsCache.users.generating = true
     stats.users = {
       total: 0,
       disabled: 0
@@ -609,17 +608,17 @@ utilsController.stats = async (req, res, next) => {
     }
 
     // Update cache
-    _stats.users.cache = stats.users
-    _stats.users.generatedAt = Date.now()
-    _stats.users.generating = false
+    statsCache.users.cache = stats.users
+    statsCache.users.generatedAt = Date.now()
+    statsCache.users.generating = false
   }
 
-  if (!_stats.uploads.cache && _stats.uploads.generating) {
+  if (!statsCache.uploads.cache && statsCache.uploads.generating) {
     stats.uploads = false
-  } else if ((_stats.uploads.invalidatedAt < _stats.uploads.generatedAt) || _stats.uploads.generating) {
-    stats.uploads = _stats.uploads.cache
+  } else if ((statsCache.uploads.invalidatedAt < statsCache.uploads.generatedAt) || statsCache.uploads.generating) {
+    stats.uploads = statsCache.uploads.cache
   } else {
-    _stats.uploads.generating = true
+    statsCache.uploads.generating = true
     stats.uploads = {
       total: 0,
       size: 0,
@@ -632,22 +631,22 @@ utilsController.stats = async (req, res, next) => {
     stats.uploads.total = uploads.length
     for (const upload of uploads) {
       stats.uploads.size += parseInt(upload.size)
-      const extname = utilsController.extname(upload.name)
-      if (utilsController.imageExtensions.includes(extname))
+      const extname = self.extname(upload.name)
+      if (self.imageExts.includes(extname))
         stats.uploads.images++
-      else if (utilsController.videoExtensions.includes(extname))
+      else if (self.videoExts.includes(extname))
         stats.uploads.videos++
       else
         stats.uploads.others++
     }
 
     // Update cache
-    _stats.uploads.cache = stats.uploads
-    _stats.uploads.generatedAt = Date.now()
-    _stats.uploads.generating = false
+    statsCache.uploads.cache = stats.uploads
+    statsCache.uploads.generatedAt = Date.now()
+    statsCache.uploads.generating = false
   }
 
   return res.json({ success: true, stats })
 }
 
-module.exports = utilsController
+module.exports = self

+ 1 - 1
database/db.js

@@ -34,6 +34,7 @@ const init = function (db) {
       table.string('ip')
       table.integer('albumid')
       table.integer('timestamp')
+      table.integer('expirydate')
     }).then(() => {})
   })
 
@@ -46,7 +47,6 @@ const init = function (db) {
       table.string('token')
       table.integer('enabled')
       table.integer('timestamp')
-      table.integer('fileLength')
       table.integer('permission')
     }).then(() => {
       db.table('users').where({ username: 'root' }).then((user) => {

+ 26 - 22
database/migration.js

@@ -3,6 +3,9 @@ const db = require('knex')(config.database)
 const perms = require('./../controllers/permissionController')
 
 const map = {
+  files: {
+    expirydate: 'integer'
+  },
   albums: {
     editedAt: 'integer',
     zipGeneratedAt: 'integer',
@@ -12,26 +15,25 @@ const map = {
   },
   users: {
     enabled: 'integer',
-    fileLength: 'integer',
     permission: 'integer'
   }
 }
 
-const migration = {}
-migration.start = async () => {
-  const tables = Object.keys(map)
-  await Promise.all(tables.map(table => {
-    const columns = Object.keys(map[table])
-    return Promise.all(columns.map(async column => {
-      if (await db.schema.hasColumn(table, column))
-        return // console.log(`SKIP: ${column} => ${table}.`)
+;(async () => {
+  const tableNames = Object.keys(map)
+  for (const tableName of tableNames) {
+    const columnNames = Object.keys(map[tableName])
+    for (const columnName of columnNames) {
+      if (await db.schema.hasColumn(tableName, columnName))
+        continue
 
-      const columnType = map[table][column]
-      return db.schema.table(table, t => { t[columnType](column) })
-        .then(() => console.log(`OK: ${column} (${columnType}) => ${table}.`))
-        .catch(console.error)
-    }))
-  }))
+      const columnType = map[tableName][columnName]
+      await db.schema.table(tableName, table => {
+        table[columnType](columnName)
+      })
+      console.log(`OK: ${tableName} <- ${columnName} (${columnType})`)
+    }
+  }
 
   await db.table('users')
     .where('username', 'root')
@@ -39,15 +41,17 @@ migration.start = async () => {
     .update({
       permission: perms.permissions.superadmin
     })
-    .then(rows => {
-      // NOTE: permissionController.js actually have a hard-coded check for "root" account so that
+    .then(result => {
+      // NOTE: permissionController.js actually has a hard-coded check for "root" account so that
       // it will always have "superadmin" permission regardless of its permission value in database
-      if (!rows) return console.log('Unable to update root\'s permission into superadmin.')
+      if (!result) return console.log('Unable to update root\'s permission into superadmin.')
       console.log(`Updated root's permission to ${perms.permissions.superadmin} (superadmin).`)
     })
 
   console.log('Migration finished! Now you may start lolisafe normally.')
-  process.exit(0)
-}
-
-migration.start()
+})()
+  .then(() => process.exit(0))
+  .catch(error => {
+    console.error(error)
+    process.exit(1)
+  })

+ 8 - 8
logger.js

@@ -1,26 +1,26 @@
 const { inspect } = require('util')
 
-const logger = {}
+const self = {}
 
-logger.clean = item => {
+const clean = item => {
   if (typeof item === 'string') return item
   const cleaned = inspect(item, { depth: 0 })
   return cleaned
 }
 
-logger.write = (content, options = {}) => {
+const write = (content, options = {}) => {
   const date = new Date().toISOString()
     .replace(/T/, ' ')
     .replace(/\..*/, '')
   const stream = options.error ? process.stderr : process.stdout
-  stream.write(`[${date}]: ${options.prefix || ''}${logger.clean(content)}\n`)
+  stream.write(`[${date}]: ${options.prefix || ''}${clean(content)}\n`)
 }
 
-logger.log = logger.write
+self.log = write
 
-logger.error = (content, options = {}) => {
+self.error = (content, options = {}) => {
   options.error = true
-  logger.write(content, options)
+  write(content, options)
 }
 
-module.exports = logger
+module.exports = self

+ 118 - 103
lolisafe.js

@@ -2,10 +2,10 @@ const bodyParser = require('body-parser')
 const clamd = require('clamdjs')
 const config = require('./config')
 const express = require('express')
-const fs = require('fs')
 const helmet = require('helmet')
 const logger = require('./logger')
 const nunjucks = require('nunjucks')
+const path = require('path')
 const RateLimit = require('express-rate-limit')
 const readline = require('readline')
 const safe = express()
@@ -17,6 +17,7 @@ process.on('unhandledRejection', error => {
   logger.error(error, { prefix: 'Unhandled Rejection (Promise): ' })
 })
 
+const paths = require('./controllers/pathsController')
 const utils = require('./controllers/utilsController')
 
 const album = require('./routes/album')
@@ -26,14 +27,6 @@ const nojs = require('./routes/nojs')
 const db = require('knex')(config.database)
 require('./database/db.js')(db)
 
-// Check and create missing directories
-fs.existsSync('./pages/custom') || fs.mkdirSync('./pages/custom')
-fs.existsSync(`./${config.logsFolder}`) || fs.mkdirSync(`./${config.logsFolder}`)
-fs.existsSync(`./${config.uploads.folder}`) || fs.mkdirSync(`./${config.uploads.folder}`)
-fs.existsSync(`./${config.uploads.folder}/chunks`) || fs.mkdirSync(`./${config.uploads.folder}/chunks`)
-fs.existsSync(`./${config.uploads.folder}/thumbs`) || fs.mkdirSync(`./${config.uploads.folder}/thumbs`)
-fs.existsSync(`./${config.uploads.folder}/zips`) || fs.mkdirSync(`./${config.uploads.folder}/zips`)
-
 safe.use(helmet())
 if (config.trustProxy) safe.set('trust proxy', 1)
 
@@ -57,7 +50,7 @@ if (Array.isArray(config.rateLimits) && config.rateLimits.length)
 safe.use(bodyParser.urlencoded({ extended: true }))
 safe.use(bodyParser.json())
 
-// safe.fiery.me-exclusive cache control
+// Cache control (safe.fiery.me)
 if (config.cacheControl) {
   const cacheControls = {
     // max-age: 30 days
@@ -79,9 +72,9 @@ if (config.cacheControl) {
   }
 
   if (config.serveFilesWithNode)
-    safe.use('/', express.static(config.uploads.folder, { setHeaders }))
+    safe.use('/', express.static(paths.uploads, { setHeaders }))
 
-  safe.use('/', express.static('./public', { setHeaders }))
+  safe.use('/', express.static(paths.public, { setHeaders }))
 
   // Do NOT cache these dynamic routes
   safe.use(['/a', '/api', '/nojs'], (req, res, next) => {
@@ -102,112 +95,107 @@ if (config.cacheControl) {
   })
 } else {
   if (config.serveFilesWithNode)
-    safe.use('/', express.static(config.uploads.folder))
+    safe.use('/', express.static(paths.uploads))
 
-  safe.use('/', express.static('./public'))
+  safe.use('/', express.static(paths.public))
 }
 
 safe.use('/', album)
 safe.use('/', nojs)
 safe.use('/api', api)
 
-if (!Array.isArray(config.pages) || !config.pages.length) {
-  logger.error('Config does not haves any frontend pages enabled')
-  process.exit(1)
-}
+;(async () => {
+  try {
+    // Verify paths, create missing ones, clean up temp ones
+    await paths.init()
 
-for (const page of config.pages)
-  if (fs.existsSync(`./pages/custom/${page}.html`)) {
-    safe.get(`/${page}`, (req, res, next) => res.sendFile(`${page}.html`, {
-      root: './pages/custom/'
-    }))
-  } else if (page === 'home') {
-    safe.get('/', (req, res, next) => res.render('home', {
-      maxSize: config.uploads.maxSize,
-      urlMaxSize: config.uploads.urlMaxSize,
-      urlDisclaimerMessage: config.uploads.urlDisclaimerMessage,
-      urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode,
-      urlExtensionsFilter: config.uploads.urlExtensionsFilter,
-      gitHash: safe.get('git-hash')
-    }))
-  } else if (page === 'faq') {
-    const fileLength = config.uploads.fileLength
-    safe.get('/faq', (req, res, next) => res.render('faq', {
-      whitelist: config.extensionsFilterMode === 'whitelist',
-      extensionsFilter: config.extensionsFilter,
-      fileLength,
-      tooShort: (fileLength.max - fileLength.default) > (fileLength.default - fileLength.min),
-      noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize),
-      chunkSize: config.uploads.chunkSize
-    }))
-  } else {
-    safe.get(`/${page}`, (req, res, next) => res.render(page))
-  }
+    if (!Array.isArray(config.pages) || !config.pages.length) {
+      logger.error('Config file does not have any frontend pages enabled')
+      process.exit(1)
+    }
 
-safe.use((req, res, next) => {
-  res.status(404).sendFile(config.errorPages[404], { root: config.errorPages.rootDir })
-})
-safe.use((error, req, res, next) => {
-  logger.error(error)
-  res.status(500).sendFile(config.errorPages[500], { root: config.errorPages.rootDir })
-})
+    for (const page of config.pages) {
+      const customPage = path.join(paths.customPages, `${page}.html`)
+      if (!await paths.access(customPage).catch(() => true))
+        safe.get(`/${page === 'home' ? '' : page}`, (req, res, next) => res.sendFile(customPage))
+      else if (page === 'home')
+        safe.get('/', (req, res, next) => res.render('home', {
+          maxSize: parseInt(config.uploads.maxSize),
+          urlMaxSize: parseInt(config.uploads.urlMaxSize),
+          urlDisclaimerMessage: config.uploads.urlDisclaimerMessage,
+          urlExtensionsFilterMode: config.uploads.urlExtensionsFilterMode,
+          urlExtensionsFilter: config.uploads.urlExtensionsFilter,
+          temporaryUploadAges: Array.isArray(config.uploads.temporaryUploadAges) &&
+            config.uploads.temporaryUploadAges.length,
+          gitHash: utils.gitHash
+        }))
+      else if (page === 'faq')
+        safe.get('/faq', (req, res, next) => res.render('faq', {
+          whitelist: config.extensionsFilterMode === 'whitelist',
+          extensionsFilter: config.extensionsFilter,
+          noJsMaxSize: parseInt(config.cloudflare.noJsMaxSize) < parseInt(config.uploads.maxSize),
+          chunkSize: parseInt(config.uploads.chunkSize)
+        }))
+      else
+        safe.get(`/${page}`, (req, res, next) => res.render(page))
+    }
+
+    // Error pages
+    safe.use((req, res, next) => {
+      res.status(404).sendFile(path.join(paths.errorRoot, config.errorPages[404]))
+    })
+
+    safe.use((error, req, res, next) => {
+      logger.error(error)
+      res.status(500).sendFile(path.join(paths.errorRoot, config.errorPages[500]))
+    })
 
-const start = async () => {
-  if (config.showGitHash) {
-    const gitHash = await new Promise((resolve, reject) => {
-      require('child_process').exec('git rev-parse HEAD', (error, stdout) => {
-        if (error) return reject(error)
-        resolve(stdout.replace(/\n$/, ''))
+    // Git hash
+    if (config.showGitHash) {
+      utils.gitHash = await new Promise((resolve, reject) => {
+        require('child_process').exec('git rev-parse HEAD', (error, stdout) => {
+          if (error) return reject(error)
+          resolve(stdout.replace(/\n$/, ''))
+        })
       })
-    }).catch(logger.error)
-    if (!gitHash) return
-    logger.log(`Git commit: ${gitHash}`)
-    safe.set('git-hash', gitHash)
-  }
+      logger.log(`Git commit: ${utils.gitHash}`)
+    }
 
-  const scan = config.uploads.scan
-  if (scan && scan.enabled) {
-    const createScanner = async () => {
-      try {
-        if (!scan.ip || !scan.port)
-          throw new Error('clamd IP or port is missing')
+    // Clamd scanner
+    if (config.uploads.scan && config.uploads.scan.enabled) {
+      const { ip, port } = config.uploads.scan
+      const version = await clamd.version(ip, port)
+      logger.log(`${ip}:${port} ${version}`)
 
-        const version = await clamd.version(scan.ip, scan.port)
-        logger.log(`${scan.ip}:${scan.port} ${version}`)
+      utils.clamd.scanner = clamd.createScanner(ip, port)
+      if (!utils.clamd.scanner)
+        throw 'Could not create clamd scanner'
+    }
 
-        const scanner = clamd.createScanner(scan.ip, scan.port)
-        safe.set('clam-scanner', scanner)
-        return true
-      } catch (error) {
-        logger.error(`[ClamAV]: ${error.toString()}`)
-        return false
-      }
+    // Cache file identifiers
+    if (config.uploads.cacheFileIdentifiers) {
+      utils.idSet = await db.table('files')
+        .select('name')
+        .then(rows => {
+          return new Set(rows.map(row => row.name.split('.')[0]))
+        })
+      logger.log(`Cached ${utils.idSet.size} file identifiers`)
     }
-    if (!await createScanner()) return process.exit(1)
-  }
 
-  if (config.uploads.cacheFileIdentifiers) {
-    // Cache tree of uploads directory
-    const setSize = await new Promise((resolve, reject) => {
-      const uploadsDir = `./${config.uploads.folder}`
-      fs.readdir(uploadsDir, (error, names) => {
-        if (error) return reject(error)
-        const set = new Set()
-        names.forEach(name => set.add(name.split('.')[0]))
-        safe.set('uploads-set', set)
-        resolve(set.size)
-      })
-    }).catch(error => logger.error(error.toString()))
-    if (!setSize) return process.exit(1)
-    logger.log(`Cached ${setSize} identifiers in uploads directory`)
-  }
+    // Binds Express to port
+    await new Promise((resolve, reject) => {
+      try {
+        safe.listen(config.port, () => resolve())
+      } catch (error) {
+        reject(error)
+      }
+    })
 
-  safe.listen(config.port, async () => {
     logger.log(`lolisafe started on port ${config.port}`)
 
-    // safe.fiery.me-exclusive cache control
+    // Cache control (safe.fiery.me)
     if (config.cacheControl) {
-      logger.log('Cache control enabled')
+      logger.log('Cache control enabled, purging...')
       const routes = config.pages.concat(['api/check'])
       const results = await utils.purgeCloudflareCache(routes)
       let errored = false
@@ -224,6 +212,32 @@ const start = async () => {
         logger.log(`Purged ${succeeded} Cloudflare's cache`)
     }
 
+    // Temporary uploads
+    if (Array.isArray(config.uploads.temporaryUploadAges) && config.uploads.temporaryUploadAges.length) {
+      let temporaryUploadsInProgress = false
+      const temporaryUploadCheck = async () => {
+        if (temporaryUploadsInProgress)
+          return
+
+        temporaryUploadsInProgress = true
+        const result = await utils.bulkDeleteExpired()
+
+        if (result.expired.length) {
+          let logMessage = `Deleted ${result.expired.length} expired upload(s)`
+          if (result.failed.length)
+            logMessage += ` but unable to delete ${result.failed.length}`
+
+          logger.log(logMessage)
+        }
+
+        temporaryUploadsInProgress = false
+      }
+      temporaryUploadCheck()
+
+      if (config.uploads.temporaryUploadsInterval)
+        setInterval(temporaryUploadCheck, config.uploads.temporaryUploadsInterval)
+    }
+
     // NODE_ENV=development yarn start
     if (process.env.NODE_ENV === 'development') {
       // Add readline interface to allow evaluating arbitrary JavaScript from console
@@ -242,9 +256,10 @@ const start = async () => {
       }).on('SIGINT', () => {
         process.exit(0)
       })
-      logger.log('Development mode enabled (disabled Nunjucks caching & enabled readline interface)')
+      logger.log('Development mode (disabled nunjucks caching & enabled readline interface)')
     }
-  })
-}
-
-start()
+  } catch (error) {
+    logger.error(error)
+    process.exit(1)
+  }
+})()

+ 6 - 5
package.json

@@ -18,8 +18,9 @@
     "start": "node ./lolisafe.js",
     "startdev": "env NODE_ENV=development node ./lolisafe.js",
     "pm2": "pm2 start --name safe ./lolisafe.js",
+    "cf-purge": "node ./scripts/cf-purge.js",
+    "delete-expired": "node ./scripts/delete-expired.js",
     "thumbs": "node ./scripts/thumbs.js",
-    "cfpurge": "node ./scripts/cfpurge.js",
     "pull": "git stash; git pull; yarn install --production; git stash pop; echo OK."
   },
   "dependencies": {
@@ -29,23 +30,23 @@
     "express": "^4.17.1",
     "express-rate-limit": "^5.0.0",
     "fluent-ffmpeg": "^2.1.2",
-    "helmet": "^3.20.1",
+    "helmet": "^3.21.0",
     "jszip": "^3.2.2",
     "knex": "^0.19.3",
     "multer": "^1.4.2",
     "node-fetch": "^2.6.0",
     "nunjucks": "^3.2.0",
-    "os": "^0.1.1",
     "randomstring": "^1.1.5",
     "readline": "^1.3.0",
     "sharp": "^0.23.0",
-    "sqlite3": "^4.1.0"
+    "sqlite3": "^4.1.0",
+    "systeminformation": "^4.14.8"
   },
   "devDependencies": {
     "eslint": "^6.3.0",
     "eslint-config-standard": "^14.1.0",
     "eslint-plugin-import": "^2.18.2",
-    "eslint-plugin-node": "^9.2.0",
+    "eslint-plugin-node": "^10.0.0",
     "eslint-plugin-promise": "^4.2.1",
     "eslint-plugin-standard": "^4.0.1"
   }

+ 6 - 0
public/css/home.css

@@ -64,15 +64,18 @@
     -webkit-transform: scale(0.86);
     transform: scale(0.86);
   }
+
   25% {
     opacity: 100;
   }
+
   67% {
     -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     -webkit-transform: scale(1);
     transform: scale(1);
   }
+
   100% {
     -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
@@ -89,15 +92,18 @@
     -webkit-transform: scale(0.86);
     transform: scale(0.86);
   }
+
   25% {
     opacity: 100;
   }
+
   67% {
     -webkit-box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     box-shadow: 0 0 0 rgba(10, 10, 10, 0), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     -webkit-transform: scale(1);
     transform: scale(1);
   }
+
   100% {
     -webkit-box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);
     box-shadow: 0 20px 60px rgba(10, 10, 10, 0.05), 0 5px 10px rgba(10, 10, 10, 0.1), 0 1px 1px rgba(10, 10, 10, 0.2);

+ 10 - 1
public/css/style.css

@@ -13,6 +13,7 @@ body {
   0% {
     opacity: 0;
   }
+
   100% {
     opacity: 1;
   }
@@ -22,6 +23,7 @@ body {
   0% {
     opacity: 0;
   }
+
   100% {
     opacity: 1;
   }
@@ -39,6 +41,12 @@ hr {
   background-color: #898b8d;
 }
 
+code,
+.message-body code {
+  background-color: #222528;
+  border-radius: 5px;
+}
+
 .title {
   color: #eff0f1;
 }
@@ -127,7 +135,8 @@ hr {
 }
 
 .progress.is-breeze:indeterminate {
-  background-image: linear-gradient(to right,#60a8dc 30%,#eff0f1 30%);
+  background-image: -webkit-gradient(linear, left top, right top, color-stop(30%, #60a8dc), color-stop(30%, #eff0f1));
+  background-image: linear-gradient(to right, #60a8dc 30%, #eff0f1 30%);
 }
 
 .message {

+ 4 - 7
public/css/sweetalert.css

@@ -31,13 +31,6 @@
   color: #bdc3c7;
 }
 
-.swal-content .is-code {
-  font-family: 'Courier New', Courier, monospace;
-  border: 1px dashed #eff0f1;
-  border-ra