Merge pull request #39 from EbookFoundation/feature/push-filters-2

actually use push target filters when new books are published
pull/40/head
Theodore Kluge 2019-02-20 13:30:31 -05:00 committed by GitHub
commit 6b1d668c4e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 73 additions and 24 deletions

View File

@ -7,6 +7,7 @@
const HttpError = require('../errors/HttpError')
const request = require('request')
const uriRegex = /^(.+:\/\/)?(.+\.)*(.+\.).{1,}(:\d+)?(.+)?/i
module.exports = {
publish: async function (req, res) {
@ -23,9 +24,16 @@ module.exports = {
if (bookExists) {
throw new HttpError(400, 'Version already exists')
} else {
const { title, isbn, author, publisher } = body
// require at least 2 fields to be filled out
if ([title, isbn, author, publisher].reduce((a, x) => a + (x ? 1 : 0), 0) >= 2) {
result = await Book.create(body).fetch()
} else {
throw new HttpError(400, 'Please fill out at least 2 fields (title, author, publisher, isbn)')
}
}
if (req.file('opds')) {
req.file('opds').upload(sails.config.skipperConfig, async function (err, uploaded) {
if (err) {
await Book.destroy({ id: result.id })
@ -38,6 +46,9 @@ module.exports = {
...result
})
})
} else {
throw new HttpError(400, 'Missing OPDS file upload')
}
} catch (e) {
if (e instanceof HttpError) return e.send(res)
return res.status(500).json({
@ -73,16 +84,27 @@ module.exports = {
/**
 * Push a newly published book's metadata to every registered push target,
 * honoring each target's optional author/publisher/title/isbn filters.
 *
 * @param {string|number} id - primary key of the Book to broadcast
 * @returns {Promise<void>} resolves once all matching POSTs have been dispatched
 */
async function sendUpdatesAsync (id) {
  const book = await Book.findOne({ id })
  const targets = await TargetUrl.find()
  // Nothing to send if the book record is gone (e.g. destroyed after a failed upload).
  if (!book) return
  // NOTE: the diff rendering had left stale pre-change lines here (a duplicate
  // `url:` property and duplicate log statements); only the post-merge lines remain.
  for (const target of targets) {
    const { author: fAuthor, publisher: fPublisher, title: fTitle, isbn: fIsbn, url } = target
    const { author: bAuthor, publisher: bPublisher, title: bTitle, isbn: bIsbn } = book
    sails.log('sending ' + book.id + ' info to ' + url)
    // Skip targets whose configured URL does not look like a URI at all.
    if (!uriRegex.test(url)) continue
    // A target only receives books matching every filter it has set;
    // an unset/empty filter field matches everything.
    if (fAuthor && !((bAuthor || '').includes(fAuthor))) continue
    if (fPublisher && !((bPublisher || '').includes(fPublisher))) continue
    if (fTitle && !((bTitle || '').includes(fTitle))) continue
    if (fIsbn && !((bIsbn || '').includes(fIsbn))) continue
    request.post({
      url: url,
      headers: { 'User-Agent': 'RoE-aggregator' },
      form: book
    }, function (err, httpResp, body) {
      if (err) {
        sails.log(`error: failed to send book ${id} to ${url}`)
      }
    })
  }
}

View File

@ -19,6 +19,7 @@ module.exports = {
},
title: { type: 'string', required: true },
author: { type: 'string' },
publisher: { type: 'string' },
isbn: { type: 'string' },
version: { type: 'string' }

View File

@ -14,7 +14,15 @@ const rateLimiter = rateLimit({
windowMs: 10 * 60 * 1000, // 10 minutes
max: 100, // limit each IP to 100 requests per windowMs
skip (req, res) {
return !req.path.startsWith('/api')
return !req.path.startsWith('/api') || req.path.startsWith('/api/publish')
}
})
// Dedicated rate limiter for the publish endpoint: up to 1000 requests
// per IP within a rolling 24-hour window.
const publishLimiter = rateLimit({
  windowMs: 24 * 60 * 60 * 1000, // 24 hours
  max: 1000, // 1000 publish requests per day
  // Only throttle /api/publish; every other path is exempt from this limiter.
  skip: (req, res) => !req.path.startsWith('/api/publish')
})
@ -40,6 +48,7 @@ module.exports.http = {
order: [
'rateLimit',
'publishLimit',
'cookieParser',
'session',
'passportInit',
@ -52,6 +61,7 @@ module.exports.http = {
'favicon'
],
rateLimit: rateLimiter,
publishLimit: publishLimiter,
passportInit: require('passport').initialize(),
passportSession: require('passport').session()

View File

@ -0,0 +1,15 @@
exports.up = function (knex, Promise) {
return Promise.all([
knex.schema.table('book', t => {
t.string('publisher')
})
])
}
exports.down = function (knex, Promise) {
return Promise.all([
knex.schema.table('book', t => {
t.dropColumns('publisher')
})
])
}

View File

@ -21,7 +21,8 @@
"debug": "node --inspect app.js",
"custom-tests": "echo 'Nothing yet'",
"db:migrate": "knex migrate:latest",
"db:rollback": "knex migrate:rollback"
"db:rollback": "knex migrate:rollback",
"g:migration": "knex migrate:make"
},
"dependencies": {
"@sailshq/connect-redis": "^3.2.1",