Config hash to prevent redundant updates, Jenkins integration
gitea/docker-discover/master: This commit looks good
parent a4e48209b6
commit c4863c4ca2
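In short, the commit hashes the generated service configuration and skips the reverse-proxy update when nothing has changed, persisting the hash across restarts. Below is a minimal sketch of that pattern, assuming sha.js and the Node fs module (both used in the diff); the function name configChanged() and the hash-file path here are illustrative placeholders, not the commit's exact code:

// Sketch only: change detection via a SHA-256 hash of the serialized service list.
let shajs = require('sha.js');
let fs = require('fs');

let hashFile = '/tmp/docker-discover/config.hash'; // assumed location; directory must already exist
let lastInfoHash;

function configChanged(services) {
  // The same service data, built in the same order, serializes to the same JSON and hash.
  let infoHash = shajs('sha256')
    .update(JSON.stringify(services))
    .digest('hex');

  if (lastInfoHash === infoHash) return false; // unchanged: caller can skip the update

  lastInfoHash = infoHash;
  fs.writeFileSync(hashFile, infoHash); // persist so a restart does not force a rewrite
  return true;
}

In index.js below, the same check runs inside updateProxy() and the saved hash is reloaded at startup.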
@@ -1 +1,2 @@
-node_modules
+node_modules
+tmp
@@ -0,0 +1,63 @@
+#!/usr/bin/env groovy
+
+pipeline {
+  agent any
+  environment {
+    PROJECT_NAME = 'docker-discover'
+  }
+  stages {
+    stage('Checkout') {
+      steps {
+        script {
+          if (!env.GIT_COMMITTER_EMAIL) {
+            env.COMMITTER_EMAIL = sh(
+              script: 'git --no-pager show -s --format="%ae"',
+              returnStdout: true
+            ).trim()
+          } else {
+            env.COMMITTER_EMAIL = env.GIT_COMMITTER_EMAIL;
+          }
+        }
+
+        sh 'printenv'
+      }
+    }
+    stage('Build') {
+      when {
+        branch 'master'
+      }
+      environment {
+        CREDS = credentials('docker-registry')
+      }
+      steps {
+        sh 'docker login --username $CREDS_USR --password $CREDS_PSW $REGISTRY'
+        sh 'docker-compose build --parallel'
+        sh 'docker-compose push'
+      }
+    }
+    stage('Deploy') {
+      when {
+        branch 'master'
+      }
+      steps {
+        sh "docker stack deploy --with-registry-auth --prune --compose-file docker-compose.yml ${env.PROJECT_NAME}"
+      }
+    }
+  }
+  post {
+    always {
+      script {
+        if (!env.COMMITTER_EMAIL) return
+        try {
+          mail(
+            to: env.COMMITTER_EMAIL,
+            subject: "Pipeline: ${env.JOB_NAME} (${env.BUILD_NUMBER}): ${currentBuild.currentResult}",
+            body: "Check status at ${env.BUILD_URL}"
+          )
+        } catch (e) {
+          echo e.toString()
+        }
+      }
+    }
+  }
+}
@@ -1,5 +1,5 @@
 delay: 4
-debug: true
+debug: false
 events: [service, node]
 docker:
   socketPath: /var/run/docker.sock
@@ -7,6 +7,7 @@ services:
       NODE_ENV: production
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock
+      - data:/tmp/docker-discover
     configs:
       - source: config
         target: /docker-discover/config.yml
@@ -27,3 +28,5 @@ configs:
   ssh:
     external: true
     name: discover_ssh
+volumes:
+  data:
index.js
@@ -3,14 +3,23 @@ let Docker = require('dockerode');
 let handlebars = require('handlebars');
 let ssh = require('node-ssh');
 let fs = require('fs');
+let shajs = require('sha.js');
 let conf = require('./config.yml');
 let package = require('./package.json');

 let docker;
 let template;
+let lastInfoHash;
+let appName = package.name;
 let isProduction = process.env.NODE_ENV === 'production';
+let tmpDir = isProduction ? `/tmp/${appName}` : `${__dirname}/tmp`;
+let hashFile = `${tmpDir}/config.hash`;

 async function updateProxy() {
   console.log('Updating reverse proxy configuration.');
+
+  // Obtaining Docker settings
+
+  let info;

   if (!isProduction) {
@@ -28,7 +37,7 @@ async function updateProxy() {
     if (!Array.isArray(ports) || !ports.length) continue;

     let name = serviceInfo.Spec.Name;
-    let match = name.match(/(.+)_main$/);
+    let match = name.match(/^(.+)_main$/);
     if (match) name = match[1];

     let service = {
@@ -50,17 +59,36 @@ async function updateProxy() {
     }
   }

-  let tmpConf = `/tmp/rproxy.${new Date().getTime()}`;
+  // Cheking settings hash
+
+  let infoHash = shajs('sha256')
+    .update(JSON.stringify(services))
+    .digest('hex');
+  console.log('Settings hash:', infoHash);
+
+  if (lastInfoHash == infoHash) {
+    console.log(`Settings haven't changed, aborting.`);
+    return;
+  }
+
+  lastInfoHash = infoHash;
+  fs.writeFileSync(hashFile, infoHash);
+
+  // Creating configuration file
+
+  let tmpConf = `${tmpDir}/config.cfg`;
   let configString = template({info, services});
   fs.writeFileSync(tmpConf, configString);

-  if (conf.debug || !isProduction) {
+  if (conf.debug) {
     let delimiter = '#'.repeat(80);
     console.log(delimiter);
     console.log(configString);
     console.log(delimiter);
   }
+
+  // Updating reverse proxies

   let files = {
     local: tmpConf,
     remote: conf.rproxy.confPath
@@ -78,14 +106,26 @@ async function updateProxy() {
     await sshClient.dispose();
   }

   fs.unlinkSync(tmpConf);
   console.log('Configuration updated.');
 }

 (async() => {
   console.log('Initializing.');
   let timeoutId;
   docker = new Docker(conf.docker);
   template = handlebars.compile(fs.readFileSync('rproxy.handlebars', 'utf8'));
+
+  try {
+    fs.mkdirSync(tmpDir);
+  } catch (err) {
+    if (err.code != 'EEXIST') throw err;
+  }
+
+  if (fs.existsSync(hashFile)) {
+    lastInfoHash = fs.readFileSync(hashFile, 'utf8');
+    console.log('Saved settings hash:', lastInfoHash);
+  }
+
   await updateProxy();

   console.log('Listenig for events.')
@@ -314,6 +314,15 @@
        }
      }
    },
+    "sha.js": {
+      "version": "2.4.11",
+      "resolved": "https://registry.npmjs.org/sha.js/-/sha.js-2.4.11.tgz",
+      "integrity": "sha512-QMEp5B7cftE7APOjk5Y6xgrbWu+WkLVQwk8JNjZ8nKRciZaByEW6MubieAiToS7+dwvrjGhH8jRXz3MVd0AYqQ==",
+      "requires": {
+        "inherits": "^2.0.1",
+        "safe-buffer": "^5.0.1"
+      }
+    },
     "shell-escape": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/shell-escape/-/shell-escape-0.2.0.tgz",
@@ -10,6 +10,7 @@
     "dockerode": "^3.0.2",
     "handlebars": "^4.7.2",
     "node-ssh": "^7.0.0",
-    "require-yaml": "0.0.1"
+    "require-yaml": "0.0.1",
+    "sha.js": "^2.4.11"
   }
 }