Commit 26fc2e1

Merge branch 'release/1.1.0'
2 parents 78a4fed + 18e09e9 commit 26fc2e1

30 files changed: +5727 -449 lines changed

README.md

Lines changed: 7 additions & 9 deletions
@@ -8,16 +8,14 @@ Browse the [wiki pages](https://github.com/balis/hyperflow/wiki) to learn more a
 
 ##Getting started
 
-The latest release of HyperFlow is 1.0.0
+The latest release of HyperFlow is 1.1.0
 
 ###Installation
-* Download the package: https://github.com/dice-cyfronet/hyperflow/archive/1.0.0.zip
-* Install the latest node.js (http://nodejs.org)
-* Install dependencies (in `hyperflow` directory): `npm install -d`
-* Install the Redis server 2.6.x or higher (http://redis.io) (tested with version 2.6.x)
-* Set an environment variable `HFLOW_PATH` to point to your hyperflow root directory.
-* Add `$HFLOW_PATH/bin` to your `PATH`.
+* Install the latest Node.js (http://nodejs.org)
+* Install HyperFlow: `npm install https://github.com/dice-cyfronet/hyperflow/archive/1.1.0.tar.gz`
+* Install dependencies: <br>`cd hyperflow`<br>`npm install -d`
+* Install Redis server (http://redis.io)
 
 ###Running
-* Start the redis server
-* Run example workflows from the `examples` directory as follows: <br>```hflow run <wf_directory>```
+* Start the Redis server: `redis-server`
+* Run example workflows from the `examples` directory as follows: <br>```hflow run examples/<wf_directory>```

bin/hflow

Lines changed: 47 additions & 12 deletions
@@ -1,56 +1,91 @@
 #!/usr/bin/env node
+
+var redisURL = process.env.REDIS_URL ? {url: process.env.REDIS_URL} : {};
+
+
 var docopt = require('docopt').docopt,
     spawn = require('child_process').spawn,
-    fs = require('fs'),
+    fs = require('fs'),
     pathtool = require('path'),
     redis = require('redis'),
-    rcl = redis.createClient(),
+    rcl = redis.createClient(redisURL),
     wflib = require('../wflib').init(rcl),
     Engine = require('../engine2'),
     async = require('async'),
-    dbId = 0;
+    dbId = 0,
+    plugins = [];
 
 var doc = "\
 Usage:\n\
-  hflow run <workflow_dir_or_file> [-s]\n\
-  hflow send <wf_id> ( <signal_file> | -d <signal_data> )\n\
+  hflow run <workflow_dir_or_file> [-s] [--with-server] [-p <plugin_module_name> ...]\n\
+  hflow start-server [-p <plugin_module_name> ...]\n\
+  hflow send <wf_id> ( <signal_file> | -d <signal_data> ) [-p <plugin_module_name> ...]\n\
   hflow -h | --help | --version";
 
 var opts = docopt(doc);
+var hfroot = pathtool.join(require('path').dirname(require.main.filename), "..");
 
-var hfroot = process.env.HFLOW_PATH;
+if (opts['-p']) {
+    opts['<plugin_module_name>'].forEach(load_plugin);
+}
 
 if (opts.run) {
     hflow_run();
 } else if (opts.send) {
     hflow_send();
+} else if (opts['start-server']) {
+    hflow_start();
+}
+
+function load_plugin(plugin_name) {
+    try {
+        var Plugin = require(plugin_name);
+        plugins.push(new Plugin());
+    } catch (err) {
+        console.log("Plugin module:", plugin_name, "not found!");
+        console.log(err);
+        process.exit(1);
+    }
+}
+
+function hflow_start() {
+    var server = require('../server/hyperflow-server.js')(rcl, wflib, plugins);
+    server.listen(process.env.PORT, function() { });
+    console.log("HyperFlow server started, app factory URI: http://%s:%d/apps", server.address().address, server.address().port);
 }
 
 function hflow_run() {
     var wfpath = opts['<workflow_dir_or_file>'],
         wfstats = fs.lstatSync(wfpath),
         wffile;
-
+
+    if (opts['--with-server']) {
+        hflow_start(); // start the HTTP server
+    }
+
     if (wfstats.isDirectory()) {
         wffile = pathtool.join(wfpath, "workflow.json");
     } else if (wfstats.isFile()) {
         wffile = wfpath;
         wfpath = pathtool.dirname(wfpath);
     }
 
-    var runWf = function(wfId) {
+    var runWf = function(wfId) {
         var config = {"emulate":"false", "workdir": pathtool.resolve(wfpath)};
         var engine = new Engine(config, wflib, wfId, function(err) {
             // This represent custom plugin listening on event from available eventServer
             // engine.eventServer.on('trace.*', function(exec, args) {
             //     console.log('Event captured: ' + exec + ' ' + args + ' job done');
             // });
+            plugins.forEach(function(plugin) {
+                plugin.init(rcl, wflib, engine);
+            });
             engine.runInstance(function(err) {
                 console.log("Wf id="+wfId);
                 if (opts['-s']) {
                     // Flag -s is present: send all input signals to the workflow -> start execution
                     wflib.getWfIns(wfId, false, function(err, wfIns) {
-                        engine.wflib.getSignalInfo(wfId, wfIns, function(err, sigs) {
+                        engine.wflib.getSignalInfo(wfId, wfIns, function(err, sigs) {
                             engine.emitSignals(sigs);
                         });
                     });
@@ -61,11 +96,11 @@ function hflow_run() {
 
     var createWf = function(cb) {
         rcl.select(dbId, function(err, rep) {
-            rcl.flushdb(function(err, rep) {
+            //rcl.flushdb(function(err, rep) { // flushing db here deletes the global 'hfid' entry (created earlier)
                 wflib.createInstanceFromFile(wffile, '', function(err, id) {
-                    cb(err, id);
+                    cb(err, id);
                 });
-            });
+            //});
         });
     }

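The new `-p <plugin_module_name>` option simply `require`s each named module, instantiates it with `new Plugin()`, and calls `plugin.init(rcl, wflib, engine)` once the engine for a workflow instance exists. A minimal sketch of a plugin module consistent with those calls (the module name, event name, and log output are hypothetical, not part of this commit):

```js
// hypothetical module: hyperflow-logging-plugin/index.js
// bin/hflow does `new Plugin()` and later calls `plugin.init(rcl, wflib, engine)`.
module.exports = function LoggingPlugin() {
    this.init = function(rcl, wflib, engine) {
        // The commented-out example in bin/hflow subscribes to engine.eventServer,
        // so a plugin could listen for engine events the same way (assumed here).
        engine.eventServer.on('trace.*', function(exec, args) {
            console.log('[logging-plugin] event:', exec, args);
        });
    };
};
```

Such a module would then be loaded with `hflow run <wf_directory> -p hyperflow-logging-plugin`, provided it is resolvable by `require`.
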
bin/hyperflow-create-workflow

Lines changed: 0 additions & 42 deletions
This file was deleted.

converters/pegasus_dax.js

Lines changed: 7 additions & 6 deletions
@@ -13,7 +13,8 @@
  * - @cb - callback function (err, wfJson)
  */
 var fs = require('fs'),
-    xml2js = require('xml2js');
+    xml2js = require('xml2js'),
+    parse = require('shell-quote').parse;
 
 // Pegasus DAX converter constructor, accepts optional name of function, used to execute tasks
 var PegasusConverter = function(functionName) {
@@ -57,7 +58,7 @@ function parseDax(filename, cb) {
         if (err) {
             cb(new Error("File parse error."));
         } else {
-            //console.log(JSON.stringify(result, null, 2));
+            //console.log(JSON.stringify(result, null, 2));
             cb(null, result);
         }
     });
@@ -77,7 +78,7 @@ function createWorkflow(dax, functionName, cb) {
 
     dax.adag.job.forEach(function(job) {
         ++nextTaskId;
-        var args = job.argument[0];
+        var args = parse(job.argument[0]);
         wfOut.tasks.push({
             "name": job['$'].name,
             "function": functionName,
@@ -103,11 +104,11 @@ function createWorkflow(dax, functionName, cb) {
 
         var dataId, dataName;
         job.uses.forEach(function(job_data) {
-            if (job_data['$'].name) {
+            if (job_data['$'].name) {
                 dataName = job_data['$'].name; // dax v3.3
-            } else {
+            } else {
                 dataName = job_data['$'].file; // dax v2.1
-            }
+            }
             if (!dataNames[dataName]) {
                 ++nextDataId;
                 wfOut.data.push({
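
Replacing `job.argument[0]` with `parse(job.argument[0])` means a job's argument string from the DAX is now split into an array of arguments, with quoting handled by the `shell-quote` package, instead of being passed on as one raw string. A small sketch of the difference (the argument string below is a made-up example, not taken from this commit):

```js
// Illustrative only: how shell-quote's parse() splits an argument string.
var parse = require('shell-quote').parse;

var raw = '-i input.txt -o "out dir/result.txt" --level 2'; // hypothetical DAX <argument> content

console.log(raw);        // old behaviour: one raw string
console.log(parse(raw)); // [ '-i', 'input.txt', '-o', 'out dir/result.txt', '--level', '2' ]
```
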

engine2/index.js

Lines changed: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
 /** Hypermedia workflow execution engine.
- ** Author: Bartosz Balis (2013)
+ ** Author: Bartosz Balis (2013-2015)
  */
 
 /*

engine2/process.js

Lines changed: 5 additions & 1 deletion
@@ -306,15 +306,19 @@ var ProcLogic = function() {
         }
     }
 
-    proc.wflib.invokeTaskFunction2(
+    proc.wflib.invokeProcFunction(
         proc.appId,
         proc.procId,
+        proc.firingId,
         funcIns,
         proc.sigValues,
         funcOuts, emul,
         proc.engine.eventServer,
         proc.engine.config,
         function(err, outs, options) {
+            //console.log("FUNC INVOKED");
+            //console.log("INS: ", JSON.stringify(proc.sigValues, null, 2));
+            //console.log("OUTS: ", outs);
             err ? cb(err): cb(null, outs, asyncInvocation, funcIns, funcOuts);
         }
     );

examples/Comet/README

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+This is the Comet workflow, originally developed in the Kepler system.
+
+The workflow requires "R" tool.

Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
+This example demonstrates a distributed workflow running under the control of two HyperFlow engines.
+
+See comments in "run.sh" for running instructions.

examples/DistributedPingPong/run.sh

Lines changed: 2 additions & 2 deletions
@@ -4,8 +4,8 @@ if [ "$#" -ne 2 ]; then
     echo "Distributed and decentralized workflow execution test using the HyperFlow REST API."
     echo
     echo "Usage:"
-    echo "1. Run the 1st HyperFlow server: node app.js ( -> runs on port1)"
-    echo "2. Run the 2nd HyperFlow server: node app.js ( -> runs on port2)"
+    echo "1. Run the 1st HyperFlow server: hflow start-server ( -> runs on port1)"
+    echo "2. Run the 2nd HyperFlow server: hflow start-server ( -> runs on port2)"
     echo "3. Then run this script:"
     echo " cd examples/DistributedPingPong"
     echo " ./run.sh <port1> <port2>"
