Verified Commit bd143eea authored by Isabella Skořepová

Revert "Custom sftp upload script" and related

This reverts commit 6a3bac13.
parent 2e6d8760
@@ -24,7 +24,6 @@
     "nunjucks": "^2.3.0",
     "nunjucks-date-filter": "^0.1.1",
     "sqlite-parser": "^0.14.3",
-    "ssh2": "^0.5.0",
     "syntax-error": "^1.1.5",
     "toml": "^2.3.0",
     "toml-js": "0.0.8",
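// sftp-sync.js: incremental upload script removed by this revert. It walks a
// local source directory, computes per-file checksums, compares them against a
// checksums.json stored on the target, and only deletes/uploads files whose
// checksums changed.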
var crypto = require('crypto');
var walk = require('walk');
var fs = require('fs');
var path = require('path');
var cli = require('cli');
var options = cli.parse({
sourcedir: [null, 'From where to upload (required)', 'string'],
targetdir: [null, 'Target directory for upload (required)', 'string'],
server: ['s', 'SFTP server (required)', 'string'],
user: ['u', 'SFTP user (required if server != "localdirectory")', 'string'],
port: ['p', 'port', 'number', 22]
});
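// Validate required arguments up front; the SFTP password is not a CLI flag
// but is read from the PASSWORD environment variable.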
function missingArguments(arg) { console.log('Missing required arguments '+arg+'. Use --help.')}
if(!options.sourcedir) return missingArguments('sourcedir');
if(!options.server) return missingArguments('server');
if(!options.user && options.server != 'localdirectory') return missingArguments('user');
if(!options.targetdir) return missingArguments('targetdir');
if(!process.env.PASSWORD && options.server != 'localdirectory')
return console.log('Missing environment variable PASSWORD');
var remoteFS;
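// remoteFS abstracts the upload target behind a small promise-based API
// (connect, setDir, uploadFile, readFile, delete, mkdir, close). Passing
// --server localdirectory selects a plain filesystem backend; anything else
// is treated as an SFTP host.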
if(options.server == 'localdirectory') {
remoteFS = {
remotedir: '',
setDir: function(dir) {
remoteFS.remotedir = dir;
},
connect: function() {
return Promise.resolve();
},
close: function() {
},
uploadFile: function(localfile, remotefile) {
function copyFile(source, target, cb) {
var cbCalled = false;
var rd = fs.createReadStream(source);
rd.on('error', function(err) {
done(err);
});
var wr = fs.createWriteStream(target);
wr.on('error', function(err) {
done(err);
});
wr.on('close', function(ex) {
done();
});
rd.pipe(wr);
function done(err) {
if (!cbCalled) {
cb(err);
cbCalled = true;
}
}
}
return new Promise(function(resolve, reject) {
console.log(' Uploading '+localfile+' to '+remotefile);
copyFile(localfile, remoteFS.remotedir+'/'+remotefile, function(err) {
if(err) console.log(' [Warning] Error uploading '+localfile+' to '+remotefile);
else console.log(' Uploaded',localfile,'to',remotefile);
resolve();
});
})
},
readFile: function(file) {
return new Promise(function(resolve, reject) {
var data = '';
fs.createReadStream(remoteFS.remotedir+'/'+file, 'utf-8')
.on('data', function(chunk) {
data+=chunk;
})
.on('end', function() {
resolve(data);
})
.on('error', function(e) {
reject(e);
})
})
},
delete: function(file) {
return new Promise(function(resolve, reject) {
fs.unlink(remoteFS.remotedir+'/'+file, function(err) {
if(err) console.log(' [Warning] Error deleting '+file);
resolve();
})
})
},
mkdir: function(file) {
return new Promise(function(resolve, reject) {
console.log(' Creating directory '+file);
fs.mkdir(remoteFS.remotedir+'/'+file,function() {
console.log(' Created directory '+file);
resolve();
})
})
}
}
} else {
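// SFTP backend: the same API, implemented on top of an ssh2 client connection.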
remoteFS = {
conn: new (require('ssh2').Client)(),
sftp: null,
remotedir: '',
setDir: function(dir) {
remoteFS.remotedir = dir;
},
connect: function() {
return new Promise(function(resolve, reject) {
var connReady = false;
remoteFS.conn.once('ready', function() {
console.log('Client :: ready');
remoteFS.conn.sftp(function(err, sftp) {
if(err) {
reject(new Error('Failed to initialize SFTP'));
} else {
remoteFS.sftp = sftp;
resolve();
}
})
})
.once('error', function() {
reject(new Error('Failed to connect to server'));
})
.connect({
host: options.server,
port: options.port,
username: options.user,
password: process.env.PASSWORD
});
})
},
close: function() {
remoteFS.conn.end();
},
uploadFile: function(localfile, remotefile) {
return new Promise(function(resolve, reject) {
console.log(' Uploading '+localfile+' to '+remotefile);
remoteFS.sftp.fastPut(localfile, remoteFS.remotedir+'/'+remotefile, function(err) {
if(err) console.log(' [Warning] Error uploading '+localfile+' to '+remotefile);
else console.log(' Uploaded',localfile,'to',remotefile);
resolve();
})
})
},
readFile: function(file) {
return new Promise(function(resolve, reject) {
var data = '';
remoteFS.sftp.createReadStream(remoteFS.remotedir+'/'+file, 'utf-8')
.on('data', function(chunk) {
data+=chunk;
})
.on('end', function() {
resolve(data);
})
.on('error', function(e) {
reject(e);
})
})
},
delete: function(file) {
return new Promise(function(resolve, reject) {
remoteFS.sftp.unlink(remoteFS.remotedir+'/'+file, function(err) {
if(err) console.log(' [Warning] Error deleting '+file);
resolve();
})
})
},
mkdir: function(file) {
return new Promise(function(resolve, reject) {
console.log(' Creating directory '+file);
remoteFS.sftp.mkdir(remoteFS.remotedir+'/'+file,function() {
console.log(' Created directory '+file);
resolve();
})
})
}
}
}
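// Hash helper: hex-encoded SHA-256 digest of a string/buffer by default.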
function checksum (str, algorithm, encoding) {
return crypto
.createHash(algorithm || 'sha256')
.update(str, 'utf8')
.digest(encoding || 'hex')
}
function fileChecksum(file) {
return new Promise(function(resolve, reject) {
fs.readFile(file, function (err, data) {
if(err) return reject(new Error('Error reading file '+ file+' '+err));
resolve({ file: file, checksum: checksum(data)});
});
})
}
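// Main pipeline: list local files, checksum them, connect to the target, fetch
// the remote checksums.json (falling back to an empty list), delete files whose
// checksum no longer matches, create any missing directories, upload changed
// files, and finally regenerate and upload a fresh checksums.json.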
new Promise(function(resolve, reject) {
console.log('Upload step: Listing files');
var files = [];
walk.walk(options.sourcedir)
.on('file',function(root,fileStats,next) {
var filename = path.join(root,fileStats.name);
files.push(filename);
next();
})
.on('errors',function(root, nodeStatsArray, next) {
console.log('Walker error', root, nodeStatsArray);
next();
})
.on('end', function() {
resolve(files);
})
})
.then(function(files) {
console.log('Upload step: Generating checksums');
var promises = [];
files.forEach(function(file) {
promises.push(fileChecksum(file))
})
return Promise.all(promises);
})
.then(function(localchecksums) {
console.log('Upload step: Making paths relative')
var r = [];
localchecksums.forEach(function(cs) {
cs.file = path.relative(options.sourcedir, cs.file);
r.push(cs);
})
return r;
})
.then(function(arg) {
console.log('Upload step: Connecting to server');
return remoteFS.connect()
.then(function() {
remoteFS.setDir(options.targetdir);
console.log(' Connected');
return arg;
})
})
.then(function(localchecksums) {
console.log('Upload step: Getting remote checksums');
return new Promise(function(resolve,reject){
return remoteFS.readFile('checksums.json')
.then(function(data) {
resolve({local: localchecksums, remote: JSON.parse(data)});
})
.catch(function(e) {
console.log('Failed to read remote checksums.json');
resolve({local: localchecksums, remote: []});
})
})
})
/*
.then(function(obj) {
console.log('Upload step: Listing remote directory');
return new Promise(function(resolve, reject) {
var files = [];
var readDir = function(dir) {
console.log('Reading dir', dir);
return new Promise(function(resolve, reject) {
obj.sftp.readdir(dir, function(err, list) {
if (err) reject(err);
var promises = [];
list.forEach(function(entry) {
if(entry.longname.substring(0,1) == 'd') { // directory
promises.push(readDir(dir+'/'+entry.filename));
} else {
promises.push(Promise.resolve(dir+'/'+entry.filename));
}
})
resolve(Promise.all(promises));
});
});
}
var flatten = function(array) {
return [].concat.apply([], array);
}
readDir(options.targetdir)
.then(flatten)
.then(function(list) {
obj.remoteFiles = list;
resolve(obj);
})
});
})
*/
.then(function(obj) {
console.log('Upload step: Deleting old files');
var localAssoc = {};
obj.local.forEach(function(entry) {
localAssoc[entry.file] = entry.checksum;
})
var promises = [];
obj.remote.forEach(function(entry) {
if(localAssoc[entry.file] != entry.checksum && entry.file != 'checksums.json') {
console.log(' deleting',entry.file);
promises.push(remoteFS.delete(entry.file));
}
})
return Promise.all(promises)
.then(function(){
return obj;
})
})
.then(function(obj) {
console.log('Upload step: Creating required directories');
var remoteAssoc = {};
obj.remote.forEach(function(entry) {
remoteAssoc[entry.file] = entry.checksum;
})
var toUpload = [];
obj.local.forEach(function(entry) {
if(remoteAssoc[entry.file] != entry.checksum) {
var dir = path.dirname(entry.file);
if(dir !== '.') toUpload.push(dir);
}
})
function uniq(a) {
var seen = {};
return a.filter(function(item) {
return seen.hasOwnProperty(item) ? false : (seen[item] = true);
});
}
toUpload = uniq(toUpload);
var pushParents = function(dir) {
var parent = path.dirname(dir);
if(parent == '.') return;
toUpload.push(parent);
pushParents(parent);
}
toUpload.forEach(function(file) {
pushParents(file);
})
toUpload = uniq(toUpload.reverse());
var promises = [];
toUpload.forEach(function(dir) {
promises.push(remoteFS.mkdir(dir));
})
return Promise.all(promises)
.then(function() {
return obj;
})
})
.then(function(obj) {
console.log('Upload step: Uploading new files');
var remoteAssoc = {};
obj.remote.forEach(function(entry) {
remoteAssoc[entry.file] = entry.checksum;
})
var promises = [];
obj.local.forEach(function(entry) {
if(remoteAssoc[entry.file] != entry.checksum) {
promises.push(remoteFS.uploadFile(options.sourcedir+'/'+entry.file,entry.file));
}
})
return Promise.all(promises)
.then(function(){
return obj;
})
})
.then(function(obj) {
console.log('Upload step: Uploading checksums.json');
return new Promise(function(resolve, reject) {
console.log(' Creating checksums.json');
fs.writeFile(options.sourcedir+'/checksums.json', JSON.stringify(obj.local, null, ' '), 'utf-8', function() {
resolve();
});
})
.then(function() {
console.log(' Removing remote checksums.json');
return remoteFS.delete('checksums.json')
})
.then(function() {
console.log(' Uploading checksums.json');
return remoteFS.uploadFile(options.sourcedir+'/checksums.json','checksums.json');
})
.then(function() {
console.log(' Removing local checksums.json');
return new Promise(function(resolve, reject){
fs.unlink(options.sourcedir+'/checksums.json', function() {
resolve();
})
})
})
})
.then(function() {
remoteFS.close();
})
.catch(function(e) {
remoteFS.close();
console.log(e.stack);
})
@@ -4,15 +4,6 @@ then
   echo -n "Heslo pro upload: "
   read -s LFTP_PASSWORD; export LFTP_PASSWORD
 fi
-export PASSWORD=$LFTP_PASSWORD
-SOURCE="${BASH_SOURCE[0]}"
-while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
-  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
-  SOURCE="$(readlink "$SOURCE")"
-  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
-done
-DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
 USER=ok1kvk.cz-www-nove
 HOST=krios.blueboard.cz
@@ -35,7 +26,16 @@ if [ "$1" == "ftp" ]; then
   echo "LFTP finished with return code $RET"
 else
   echo "Using SFTP"
-  /usr/bin/time -f "Upload took %e" -- node $DIR/sftp-sync.js --sourcedir build --server $HOST --user $USER --remotedir / --port 2121
+  time lftp -e "set sftp:auto-confirm yes;\
+  set cmd:fail-exit yes;\
+  set net:timeout 5;\
+  set net:reconnect-interval-base $RECONNECT_INTERVAL;\
+  set net:max-retries $MAX_RETRIES;\
+  open --user $USER --env-password -p 2121 sftp://$HOST/;\
+  mirror -c --verbose=9 -e -R -L ./build /;\
+  exit 0;"
+  RET=$?
+  echo "LFTP finished with return code $RET"
 fi
 exit 0