generated from container/tmpl
Julin
2 years ago
12 changed files with 408 additions and 0 deletions
@@ -0,0 +1,26 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "node",
            "request": "launch",
            "name": "Launch Program",
            "skipFiles": [
                "<node_internals>/**"
            ],
            "program": "${workspaceFolder}\\index.js",
            "args": [
                "-g postgres://FashionAdmin:123456@10.8.30.36:5432/pepca",
                "--clickhouseUrl http://10.8.30.71",
                "--clickhousePort 30123",
                "--clickhouseDbPepcaM pg_pepca_m"
            ],
            "env": {
                "NODE_ENV": "development"
            }
        }
    ]
}
@@ -0,0 +1,7 @@
FROM repository.anxinyun.cn/base-images/nodejs12:20.10.12.2

COPY . /var/app

WORKDIR /var/app

ENTRYPOINT [ "node", "index.js" ]
@@ -0,0 +1,91 @@
const args = require('args');
const path = require('path');
const moment = require('moment');

const dev = process.env.NODE_ENV == 'development';

// Startup arguments
args.option(['g', 'postgresUrl'], 'PostgreSQL server URL');

// ClickHouse
args.option('clickhouseUrl', 'ClickHouse URL');
args.option('clickhousePort', 'ClickHouse Port');
args.option('clickhouseDbPepcaM', 'ClickHouse pepca database name');

const flags = args.parse(process.argv);

const POSTGRES_URL = process.env.POSTGRES_URL || flags.postgresUrl;
const CLICKHOUST_URL = process.env.CLICKHOUST_URL || flags.clickhouseUrl;
const CLICKHOUST_PORT = process.env.CLICKHOUST_PORT || flags.clickhousePort;
const CLICKHOUST_USERNAME = process.env.CLICKHOUST_USERNAME || flags.clickhouseUsername;
const CLICKHOUST_PASSWORD = process.env.CLICKHOUST_PASSWORD || flags.clickhousePassword;
const CLICKHOUST_DB_PEP_CA_M = process.env.CLICKHOUST_DB_PEP_CA_M || flags.clickhouseDbPepcaM;

if (!POSTGRES_URL || !CLICKHOUST_URL || !CLICKHOUST_PORT || !CLICKHOUST_DB_PEP_CA_M) {
    console.log('Missing startup arguments, exiting.');
    args.showHelp();
    process.exit(1);
}

const product = {
    postgres: {
        url: POSTGRES_URL,
        opts: {
            pool: {
                max: 20,
                min: 10,
                idle: 10000
            },
            define: {
                freezeTableName: true, // use table names exactly as defined
                timestamps: false // no "createdAt"/"updatedAt"/"deletedAt" columns
            },
            timezone: '+08:00',
            logging: false
        },
        models: [
            require('./lib/models')
        ]
    },
    clickhouse: {
        url: CLICKHOUST_URL,
        port: CLICKHOUST_PORT,
        username: CLICKHOUST_USERNAME,
        password: CLICKHOUST_PASSWORD,
        databases: [{
            key: 'pg_pepca_m',
            name: CLICKHOUST_DB_PEP_CA_M
        }]
    },
    logger: {
        level: 'info',
        json: false,
        filename: path.join(__dirname, 'log', 'runtime.txt'),
        colorize: false,
        maxsize: 1024 * 1024 * 5,
        zippedArchive: true,
        maxFiles: 10,
        prettyPrint: true,
        label: '',
        timestamp: () => moment().format('YYYY-MM-DD HH:mm:ss.SSS'),
        tailable: true,
        depth: null,
        showLevel: true,
        maxRetries: 1
    }
};

const development = {
    postgres: product.postgres,
    clickhouse: product.clickhouse,
    logger: product.logger
};

if (dev) {
    development.logger.filename = path.join(__dirname, 'log', 'development.log');
    development.logger.level = 'debug';

    development.postgres.opts.logging = console.log;
}

module.exports = dev ? development : product;
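Note: every setting above falls back to an environment variable, so the CLI flags are optional. A minimal sketch of consuming the exported config (assuming this file is the project-root config.js, as the requires in index.js below suggest):

const config = require('./config');
// values come from POSTGRES_URL / CLICKHOUST_* env vars or the CLI flags
console.log(config.postgres.url);          // PostgreSQL connection string
console.log(config.clickhouse.databases);  // [{ key: 'pg_pepca_m', name: ... }]
console.log(config.logger.level);          // 'info' in production, 'debug' when NODE_ENV=development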
@@ -0,0 +1,27 @@
/**
 * Created by Julin on 2022/11/07.
 */
'use strict';

const schedule = require('node-schedule');
const config = require('./config');
const logger = require('./lib/logger');
const clickhouse = require('./lib/dc/clickhouse');
const postgres = require('./lib/dc/postgres');
const statProcessNodes = require('./lib/statProcessNodes');

(function () {
    logger(config.logger);

    clickhouse(config.clickhouse);
    postgres(config.postgres);

    statProcessNodes();

    process.logger.info('[FS-STATS]', 'started.');

    // Run every day at 23:00
    const job = schedule.scheduleJob('0 0 23 * * ?', function () {
        statProcessNodes();
    });
})();
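The cron spec above uses node-schedule's six-field layout (second, minute, hour, day of month, month, day of week). A hedged sketch of the same schedule with '*' in the day-of-week field, in case the Quartz-style '?' is not accepted by the installed cron parser:

// runs statProcessNodes() every day at 23:00:00
const job = schedule.scheduleJob('0 0 23 * * *', function () {
    statProcessNodes();
});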
@@ -0,0 +1,19 @@
pipeline {
    agent {
        node {
            label 'jnlp-slave'
        }
    }

    stages {
        stage('Building pep-stats ......') {
            steps {
                sh 'switch-auth.sh anxinyun'
                buildName '#${BUILD_NUMBER} ~/fs-cloud/${JOB_NAME}:${IMAGE_VERSION}'
                buildDescription 'registry.cn-hangzhou.aliyuncs.com/${CLOUD}/${JOB_NAME}:${IMAGE_VERSION}'
                sh 'docker build -t registry.cn-hangzhou.aliyuncs.com/${CLOUD}/${JOB_NAME}:${IMAGE_VERSION} .'
                sh 'docker push registry.cn-hangzhou.aliyuncs.com/${CLOUD}/${JOB_NAME}:${IMAGE_VERSION}'
            }
        }
    }
}
@@ -0,0 +1,36 @@
/**
 * Created by Julin on 2022/11/07.
 */
'use strict';

const { ClickHouse } = require('clickhouse');

module.exports = function (config) {
    const defaultConfig = require('../../config').clickhouse;
    config = config || defaultConfig;
    if (config) {
        try {
            const { url, port, username, password, databases = [] } = config;
            let clickhouse = {};
            for (let db of databases) {
                clickhouse[db.key] = new ClickHouse({
                    url,
                    port,
                    format: 'json',
                    basicAuth: username && password ? {
                        username,
                        password,
                    } : null,
                    config: {
                        database: db.name
                    }
                });
            }
            process.clickhouse = clickhouse;
            return clickhouse;
        } catch (err) {
            process.logger.error('ClickHouse init error:', err);
            process.exit(1);
        }
    }
};
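The connector stores one client per configured database on process.clickhouse, keyed by db.key. A minimal usage sketch mirroring how statProcessNodes queries it further down (the table name is taken from that query):

async function countProcesses() {
    // assumes clickhouse(config.clickhouse) has already run (see index.js)
    const rows = await process.clickhouse['pg_pepca_m']
        .query('select count(*) as total from pg_pepca_m.workflow_process')
        .toPromise();
    return rows; // JSON rows, because the clients are created with format: 'json'
}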
@@ -0,0 +1,24 @@
'use strict';

const Sequelize = require('sequelize');

module.exports = function (config) {
    const defaultConfig = require('../../config').postgres;
    config = config || defaultConfig;

    if (config) {
        let orm = new Sequelize(config.url, config.opts);
        let dc = {
            orm,
            ORM: Sequelize,
            models: {}
        };
        if (Array.isArray(config.models)) {
            config.models.forEach(fn => {
                fn(dc);
            });
        }
        process.postgres = dc;
        return dc;
    }
};
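Calling this with config.postgres exposes the Sequelize instance and registered models on process.postgres. A small usage sketch, assuming the StatsProcessNodes model defined further down has been registered:

async function listStats() {
    const { models } = process.postgres;
    // plain read through the Sequelize model; camelCase attributes map to the snake_case columns
    return models.StatsProcessNodes.findAll({ raw: true });
}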
@@ -0,0 +1,46 @@
'use strict';

const winston = require('winston');
const fs = require('fs');
const path = require('path');
const moment = require('moment');

module.exports = function (config) {
    const defaultConfig = require('../../config').logger;
    config = config || defaultConfig;
    config.level = config.level || 'error';
    config.filename = config.filename || path.join(process.cwd(), "log", "runtime.log");
    let dir = path.dirname(config.filename);
    let logger = {};
    try {
        if (!fs.existsSync(dir)) {
            fs.mkdirSync(dir);
        }
        logger = new (winston.Logger)({
            level: config.level,
            transports: [
                new (winston.transports.Console)({
                    colorize: 'all',
                    timestamp: () => moment().format('YYYY-MM-DD HH:mm:ss.SSS')
                }),
                new (winston.transports.File)(config)
            ],
            exitOnError: false
        });
    } catch (err) {
        console.log(err);
        logger = new (winston.Logger)({
            level: config.level,
            transports: [
                new (winston.transports.Console)({
                    colorize: 'all',
                    timestamp: () => moment().format('YYYY-MM-DD HH:mm:ss.SSS')
                })
            ],
            exitOnError: false
        });
    }
    logger.log('debug', '[LOGGER]', 'Init.');
    process.logger = logger;
    return logger;
};
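The logger is built with the winston 2.x constructor API (matching the ^2.3.1 dependency) and is shared via process.logger. A minimal sketch of the call sites used elsewhere in this change:

// after logger(config.logger) has run (see index.js)
process.logger.info('[FS-STATS]', 'started.');
process.logger.error('ClickHouse init error:', new Error('example'));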
@@ -0,0 +1,8 @@
/**
 * Created by Julin on 2022/11/07.
 */
'use strict';

module.exports = function (dc) {
    require('./stats_process_nodes')(dc);
};
@@ -0,0 +1,32 @@
module.exports = function (dc) {
    const StatsProcessNodes = dc.orm.define('statsProcessNodes', {
        processVersionId: {
            field: 'process_version_id',
            type: dc.ORM.INTEGER,
            primaryKey: true,
            unique: true,
            allowNull: false
        },
        processId: {
            field: 'process_id',
            type: dc.ORM.INTEGER,
            allowNull: false
        },
        processName: {
            field: 'process_name',
            type: dc.ORM.STRING,
            allowNull: false
        },
        processNodesTotal: {
            field: 'process_nodes_total',
            type: dc.ORM.INTEGER,
            allowNull: false
        }
    }, {
        tableName: 'stats_process_nodes'
    });

    dc.models.StatsProcessNodes = StatsProcessNodes;

    return StatsProcessNodes;
};
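Model attributes are camelCase while field maps them to the snake_case columns of stats_process_nodes. A sketch of the insert shape that statProcessNodes builds below (the values here are made up for illustration):

// inside an async function, after postgres(config.postgres) has run
await process.postgres.models.StatsProcessNodes.bulkCreate([{
    processVersionId: 101,        // -> process_version_id
    processId: 7,                 // -> process_id
    processName: 'leave-request', // -> process_name
    processNodesTotal: 3          // -> process_nodes_total
}]);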
@@ -0,0 +1,68 @@
/**
 * Created by Julin on 2022/11/07.
 */
'use strict';

module.exports = async function () {
    try {
        // 1. delete all rows in the table
        await clearStatsProcessNodes();
        // 2. insert data into the table
        let processes = await process.clickhouse['pg_pepca_m'].query(`
            select v.id as version_id,
                v.process_id as process_id,
                p.name as process_name,
                v.bpmn_json
            from pg_pepca_m.workflow_process_version as v
            inner join pg_pepca_m.workflow_process as p on v.process_id=p.id
            where v.current=true and p.is_enable=true and p.deleted=false
            order by v.id desc
        `).toPromise();
        let dataToDB = processes.reduce((p, c) => {
            let nodes = JSON.parse(c.bpmn_json);
            let taskNodesCount = 0;
            for (let key in nodes) {
                if (nodes[key].type == 'bpmn:UserTask') taskNodesCount++;
            }
            p.push({
                processVersionId: c.version_id,
                processId: c.process_id,
                processName: c.process_name,
                processNodesTotal: taskNodesCount
            });
            return p;
        }, []);
        await storageStatsProcessNodes(dataToDB);
    } catch (err) {
        process.logger.error('Error in function [statProcessNodes]:', err);
    }
};

async function clearStatsProcessNodes() {
    const transaction = await process.postgres.orm.transaction();
    const models = process.postgres.models;
    const { Op } = process.postgres.ORM;
    try {
        await models.StatsProcessNodes.destroy({
            where: { processVersionId: { [Op.gt]: 0 } },
            transaction
        });
        await transaction.commit();
    } catch (err) {
        await transaction.rollback();
        process.logger.error('Destroy data from Postgres DB [stats_process_nodes] error:', err);
    }
}

async function storageStatsProcessNodes(data) {
    const transaction = await process.postgres.orm.transaction();
    const models = process.postgres.models;
    try {
        await models.StatsProcessNodes.bulkCreate(data, { transaction });
        await transaction.commit();
        process.logger.info('Sync data to Postgres DB [stats_process_nodes]');
    } catch (err) {
        await transaction.rollback();
        process.logger.error('Storage data to Postgres DB [stats_process_nodes] error:', err);
    }
}
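The reduce above counts 'bpmn:UserTask' entries in each version's bpmn_json. A tiny worked example of that counting step (the JSON shape is assumed from the loop: an object keyed by node id with a type per node; this payload is hypothetical):

const nodes = JSON.parse('{"n1":{"type":"bpmn:UserTask"},"n2":{"type":"bpmn:ExclusiveGateway"},"n3":{"type":"bpmn:UserTask"}}');
let taskNodesCount = 0;
for (let key in nodes) {
    if (nodes[key].type == 'bpmn:UserTask') taskNodesCount++;
}
console.log(taskNodesCount); // 2, stored as process_nodes_total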
@@ -0,0 +1,24 @@
{
    "name": "pep-stats",
    "version": "1.0.0",
    "description": "pep project statistics and analysis",
    "main": "index.js",
    "scripts": {
        "test": "echo \"Error: no test specified\" && exit 1"
    },
    "keywords": [
        "pep",
        "stats"
    ],
    "author": "pengling",
    "license": "ISC",
    "dependencies": {
        "args": "^3.0.7",
        "clickhouse": "^2.6.0",
        "moment": "^2.29.4",
        "node-schedule": "^2.1.0",
        "pg": "^8.8.0",
        "sequelize": "^7.0.0-alpha2.2",
        "winston": "^2.3.1"
    }
}