Merge pull request #39 from EbookFoundation/feature/push-filters-2
actually use push target filters when new books are published
commit 6b1d668c4e
@@ -7,6 +7,7 @@
 
 const HttpError = require('../errors/HttpError')
 const request = require('request')
+const uriRegex = /^(.+:\/\/)?(.+\.)*(.+\.).{1,}(:\d+)?(.+)?/i
 
 module.exports = {
   publish: async function (req, res) {
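The new uriRegex is a loose sanity check on push-target URLs rather than a full validator. A quick standalone sketch (not part of the diff; the sample URLs are illustrative) shows what it lets through:

// Sketch only: the regex requires at least one dot-separated segment,
// with an optional scheme and port.
const uriRegex = /^(.+:\/\/)?(.+\.)*(.+\.).{1,}(:\d+)?(.+)?/i

console.log(uriRegex.test('https://example.com/push')) // true  - scheme + host + path
console.log(uriRegex.test('example.com:8080/hook'))    // true  - the scheme is optional
console.log(uriRegex.test('localhost'))                // false - no dot, so this target is skipped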
@@ -23,21 +24,31 @@ module.exports = {
       if (bookExists) {
         throw new HttpError(400, 'Version already exists')
       } else {
-        result = await Book.create(body).fetch()
+        const { title, isbn, author, publisher } = body
+        // require at least 2 fields to be filled out
+        if ([title, isbn, author, publisher].reduce((a, x) => a + (x ? 1 : 0), 0) >= 2) {
+          result = await Book.create(body).fetch()
+        } else {
+          throw new HttpError(400, 'Please fill out at least 2 fields (title, author, publisher, isbn)')
+        }
       }
 
-      req.file('opds').upload(sails.config.skipperConfig, async function (err, uploaded) {
-        if (err) {
-          await Book.destroy({ id: result.id })
-          throw new HttpError(500, err.message)
-        }
-        const fd = (uploaded[0] || {}).fd
-        await Book.update({ id: result.id }, { storage: fd })
-        sendUpdatesAsync(result.id)
-        return res.json({
-          ...result
+      if (req.file('opds')) {
+        req.file('opds').upload(sails.config.skipperConfig, async function (err, uploaded) {
+          if (err) {
+            await Book.destroy({ id: result.id })
+            throw new HttpError(500, err.message)
+          }
+          const fd = (uploaded[0] || {}).fd
+          await Book.update({ id: result.id }, { storage: fd })
+          sendUpdatesAsync(result.id)
+          return res.json({
+            ...result
+          })
         })
-      })
+      } else {
+        throw new HttpError(400, 'Missing OPDS file upload')
+      }
     } catch (e) {
       if (e instanceof HttpError) return e.send(res)
       return res.status(500).json({
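The new validation accepts a book as long as any two of the four metadata fields are present. A small sketch of the same rule in isolation (hasEnoughMetadata is illustrative, not a function in the codebase):

function hasEnoughMetadata (body) {
  const { title, isbn, author, publisher } = body
  // count the truthy fields, same reduce as in the controller above
  return [title, isbn, author, publisher].reduce((a, x) => a + (x ? 1 : 0), 0) >= 2
}

console.log(hasEnoughMetadata({ title: 'Moby Dick', author: 'Herman Melville' })) // true
console.log(hasEnoughMetadata({ title: 'Moby Dick' }))                            // false - rejected with a 400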
@@ -73,16 +84,27 @@ module.exports = {
 async function sendUpdatesAsync (id) {
   const book = await Book.findOne({ id })
   const targets = await TargetUrl.find()
   if (!book) return
   for (const i in targets) {
-    sails.log('sending ' + book.id + ' info to ' + targets[i].url)
-    request.post({
-      url: targets[i].url,
-      headers: { 'User-Agent': 'RoE-aggregator' },
-      form: book
-    }, function (err, httpResp, body) {
-      if (err) {
-        sails.log(`error: failed to send book ${id} to ${targets[i].url}`)
-      }
-    })
+    const item = targets[i]
+    const { author: fAuthor, publisher: fPublisher, title: fTitle, isbn: fIsbn, url } = item
+    const { author: bAuthor, publisher: bPublisher, title: bTitle, isbn: bIsbn } = book
+    sails.log('sending ' + book.id + ' info to ' + url)
+
+    if (uriRegex.test(url)) {
+      if (fAuthor && !((bAuthor || '').includes(fAuthor))) continue
+      if (fPublisher && !((bPublisher || '').includes(fPublisher))) continue
+      if (fTitle && !((bTitle || '').includes(fTitle))) continue
+      if (fIsbn && !((bIsbn || '').includes(fIsbn))) continue
+      request.post({
+        url: url,
+        headers: { 'User-Agent': 'RoE-aggregator' },
+        form: book
+      }, function (err, httpResp, body) {
+        if (err) {
+          sails.log(`error: failed to send book ${id} to ${url}`)
+        }
+      })
+    }
   }
 }
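The continue-chain above implements the actual push filter: a target only receives the book if every filter field it sets is contained in the corresponding book field. An equivalent illustrative helper (matchesFilters is a sketch, not project code; the sample data is made up):

function matchesFilters (target, book) {
  // each filter field is either unset, or must be a substring of the book's field
  return ['author', 'publisher', 'title', 'isbn']
    .every(f => !target[f] || (book[f] || '').includes(target[f]))
}

const book = { title: 'Moby Dick', author: 'Herman Melville', publisher: 'Harper', isbn: '0000000000' }
console.log(matchesFilters({ url: 'https://example.com/hook', author: 'Melville' }, book))   // true  - book is pushed
console.log(matchesFilters({ url: 'https://example.com/hook', publisher: 'Penguin' }, book)) // false - target is skipped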
@@ -19,6 +19,7 @@ module.exports = {
     },
     title: { type: 'string', required: true },
     author: { type: 'string' },
+    publisher: { type: 'string' },
     isbn: { type: 'string' },
     version: { type: 'string' }
 
@@ -14,7 +14,15 @@ const rateLimiter = rateLimit({
   windowMs: 10 * 60 * 1000, // 10 minutes
   max: 100, // limit each IP to 100 requests per windowMs
   skip (req, res) {
-    return !req.path.startsWith('/api')
+    return !req.path.startsWith('/api') || req.path.startsWith('/api/publish')
   }
 })
+
+const publishLimiter = rateLimit({
+  windowMs: 1000 * 60 * 60 * 24, // 24 hours
+  max: 1000, // 1000 publish requests per day
+  skip (req, res) {
+    return !req.path.startsWith('/api/publish')
+  }
+})
 
@@ -40,6 +48,7 @@ module.exports.http = {
 
     order: [
       'rateLimit',
+      'publishLimit',
       'cookieParser',
       'session',
       'passportInit',
@@ -52,6 +61,7 @@ module.exports.http = {
       'favicon'
     ],
     rateLimit: rateLimiter,
+    publishLimit: publishLimiter,
     passportInit: require('passport').initialize(),
     passportSession: require('passport').session()
 
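Because the general limiter now skips /api/publish, every request path is governed by at most one limiter. A sketch of the resulting split (illustrative only; the sample paths are made up):

function limiterFor (path) {
  if (path.startsWith('/api/publish')) return 'publishLimiter: 1000 requests / 24 h'
  if (path.startsWith('/api')) return 'rateLimiter: 100 requests / 10 min'
  return 'no rate limit'
}

console.log(limiterFor('/api/publish')) // publishLimiter: 1000 requests / 24 h
console.log(limiterFor('/api/books'))   // rateLimiter: 100 requests / 10 min
console.log(limiterFor('/'))            // no rate limit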
@@ -0,0 +1,15 @@
+exports.up = function (knex, Promise) {
+  return Promise.all([
+    knex.schema.table('book', t => {
+      t.string('publisher')
+    })
+  ])
+}
+
+exports.down = function (knex, Promise) {
+  return Promise.all([
+    knex.schema.table('book', t => {
+      t.dropColumns('publisher')
+    })
+  ])
+}
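For reference, the same migration can be driven programmatically; a minimal sketch, assuming the project's knexfile at the repository root (the npm scripts below wrap the equivalent CLI commands):

const knex = require('knex')(require('./knexfile'))

knex.migrate.latest()                   // applies the migration: adds book.publisher
  .then(() => knex.migrate.rollback())  // undoes it: drops book.publisher
  .then(() => knex.destroy())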
@@ -21,7 +21,8 @@
     "debug": "node --inspect app.js",
     "custom-tests": "echo 'Nothing yet'",
     "db:migrate": "knex migrate:latest",
-    "db:rollback": "knex migrate:rollback"
+    "db:rollback": "knex migrate:rollback",
+    "g:migration": "knex migrate:make"
   },
   "dependencies": {
     "@sailshq/connect-redis": "^3.2.1",
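With the added script, a migration skeleton can be generated through npm, e.g. npm run g:migration -- add-publisher-to-book (the migration name here is illustrative); npm run db:migrate and npm run db:rollback continue to apply and roll back migrations as before.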