Added a build script that will push newly built code to avid habit

Added a request timeout to prevent long requests from holding up note saving
Added a header to the request to simulate the Google crawler
Max G 2019-08-03 21:03:35 +00:00
parent 1db2a79131
commit dd0205a3c1
2 changed files with 70 additions and 1 deletion

buildAndUpdateProd.sh (new executable file, 34 lines)

@@ -0,0 +1,34 @@
#!/bin/bash
#
# Push built release files to production server
#
echo -e "\e[32m\nStarting Build, hold onto your parts... \n\e[0m"
# Build out new release
cd client
npm run build
cd ..
# Remove the old release archive if one exists
rm -f release.tar.gz
# Compress only server, node_modules, client/dist, and package.json
echo -e "\e[32m\nCompressing client and server code... \n\e[0m"
tar -czf release.tar.gz server node_modules client/dist package.json
# Send the compressed release to the remote machine
echo -e "\e[32m\nMoving compressed release to production... \n\e[0m"
rsync -e 'ssh -p 13328' -havzC --update release.tar.gz mab@avidhabit.com:/home/mab/pi/
# Remove the local release archive after it's been uploaded
rm release.tar.gz
# Uncompress the release on the server
echo -e "\e[32m\nExtracting release on production... \n\e[0m"
ssh mab@avidhabit.com -p 13328 "cd /home/mab/pi/; rm -r server node_modules client; tar -xzf *.tar.gz; rm *.tar.gz; pm2 reload all"
# Congratulate yourself on how awesome you are
echo -e "\e[32m\nRelease Complete! Nice Work! \n\e[0m"

Second changed file (contains Attachment.processUrl)

@@ -112,15 +112,26 @@ Attachment.processUrl = (userId, noteId, url) => {
    var removeWhitespace = /\s+/g
    // console.log('Scraping ', website)
    const options = {
      uri: url,
      simple: true,
      timeout: 1000 * 10, // 10 seconds
      headers: {
        'User-Agent': 'Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)' // Simulate Google crawler headers
      },
      transform: function (body) {
        return cheerio.load(body);
      }
    }
    let requestTimeout = null
    let request = rp(options)
      .then($ => {
        clearTimeout(requestTimeout)
        var desiredSearchText = ''
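
A note on the options above: with request-promise, simple: true rejects the promise on non-2xx status codes, and timeout caps how long the underlying request may wait (10 seconds here). The 5-second setTimeout added below fires well before that, so a slow scrape hits the cancellation fallback rather than a request timeout error.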
@@ -191,5 +202,29 @@ Attachment.processUrl = (userId, noteId, url) => {
        .catch(console.log)
      })
      .catch(error => {
        clearTimeout(requestTimeout) // stop the fallback timer so it can't fire after a failed scrape
        console.log('Issue with scrape')
        console.log(error)
        resolve('')
      })
    requestTimeout = setTimeout(() => {
      console.log('Cancel the request, it\'s taking too long.')
      request.cancel()
      // Declared locally; the var inside the .then callback is out of scope here
      const desiredSearchText = 'Unable to Scrape URL at this time'
      const created = Math.round((+new Date) / 1000)
      // Create attachment in DB with the fallback text and provided data
      db.promise()
        .query(`INSERT INTO attachment
          (note_id, user_id, attachment_type, text, url, last_indexed)
          VALUES (?, ?, ?, ?, ?, ?)`, [noteId, userId, 1, desiredSearchText, url, created])
        .then((rows, fields) => {
          resolve(desiredSearchText) // Return the fallback text
        })
        .catch(console.log)
    }, 5000)
  })
}
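
For reference, the pattern above races the scrape against a manual timer: request-promise returns a Bluebird-backed promise, so calling .cancel() aborts a pending request without rejecting it, which is why the fallback path resolves on its own. A minimal standalone sketch of the same idea (the scrapeWithFallback name and the time budgets are illustrative, not part of this commit):

const rp = require('request-promise')

// Resolve with the response body, or with fallback text if the request
// outlives the time budget. Sketch only, not the app's actual helper.
function scrapeWithFallback (url, budgetMs) {
  return new Promise(resolve => {
    let timer = null
    const request = rp({ uri: url, simple: true, timeout: 1000 * 10 })
      .then(body => {
        clearTimeout(timer) // finished in time; drop the fallback
        resolve(body)
      })
      .catch(error => {
        clearTimeout(timer)
        console.log('Issue with scrape', error)
        resolve('') // never block the caller on a failed scrape
      })
    timer = setTimeout(() => {
      request.cancel() // Bluebird cancellation; the handlers above never fire
      resolve('Unable to Scrape URL at this time')
    }, budgetMs)
  })
}

// Example: scrapeWithFallback('https://example.com', 5000).then(text => console.log(text))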