From c29b102aa969d0a7fa074efb46e1b0233e806e3f Mon Sep 17 00:00:00 2001
From: mklkun
Date: Mon, 12 Apr 2021 15:42:33 +0200
Subject: [PATCH 01/25] Add getAllClouds and getAllNodes endpoints
---
.../org/activeeon/morphemic/PAGateway.java | 18 ++++++++++++++++++
1 file changed, 18 insertions(+)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 169eebfb..17b758f2 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -465,6 +465,16 @@ public class PAGateway {
});
}
+ /**
+ * Get all registered clouds
+ * @return List of all table PACloud's entries
+ */
+ public List getAllClouds() {
+ return EntityManagerHelper.createQuery("SELECT pac FROM PACloud pac", PACloud.class).getResultList();
+ }
+
+
+
/**
* Remove clouds
* @param cloudIDs List of cloud IDs to remove
@@ -486,6 +496,14 @@ public class PAGateway {
EntityManagerHelper.commit();
}
+ /**
+ * Get all added nodes
+ * @return List of all table Deployment's entries
+ */
+ public List getAllNodes() {
+ return EntityManagerHelper.createQuery("SELECT d FROM Deployment d", Deployment.class).getResultList();
+ }
+
/**
* Remove nodes
* @param nodeNames List of node names to remove
--
GitLab
From 1b5624385c1568175ec7521df3614ed750801562 Mon Sep 17 00:00:00 2001
From: areniewicz
Date: Tue, 13 Apr 2021 12:57:40 +0200
Subject: [PATCH 02/25] deploy config for performance-model
---
.gitlab-ci.yml | 31 +++++++++++++++++++++++++++----
1 file changed, 27 insertions(+), 4 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index eb762bce..7d4203e5 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,9 +2,14 @@
variables:
MAVEN_IMAGE: "maven:3.5.2-jdk-8"
-
LOCAL_REPO: "127.0.0.1:5000"
+ DOCKER_REPO: "gitlab.ow2.org:4567"
+ DOCKER_DIND_IMAGE: "docker:19.03.1"
+ DOCKER_DIND_SERVICE: "$DOCKER_DIND_IMAGE-dind"
+ DOCKER_DRIVER: overlay
+ DOCKER_TLS_CERTDIR: "/certs"
+
SCHEDULING_ABSTRACTION_LAYER_CLI: "mvn -DskipTests --batch-mode -f scheduling-abstraction-layer/pom.xml"
cache:
@@ -13,7 +18,7 @@ cache:
before_script:
- echo '=========================================================================='
- - echo $SCHEDULING_ABSTRACTION_LAYER_CLI
+ - echo $MORPHEMIC_PREPROCESSOR_LAYER_CLI
- echo '=========================================================================='
- mkdir -p $HOME/.m2
- echo '
Date: Tue, 13 Apr 2021 14:53:17 +0200
Subject: [PATCH 03/25] deploy config for performance-model fixed paths
---
.gitlab-ci.yml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7d4203e5..4f2ac725 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -50,8 +50,8 @@ deploy:performance-model:
services:
- $DOCKER_DIND_SERVICE
script:
- - cd morphemic-performance-model/ml_code/deployment/
- - docker build -t performance_model -f ./Dockerfile .
+ - cd morphemic-performance-model/ml_code/
+ - docker build -t performance_model -f ./deployment/Dockerfile .
- docker image ls
- echo "$K8S_SECRET_DOCKER_PASSWORD" | docker login $CI_REGISTRY -u $K8S_SECRET_DOCKER_USER --password-stdin
- docker tag $LOCAL_REPO/performance_model:unknown $CI_REGISTRY_IMAGE/performance_model:$CI_COMMIT_BRANCH
--
GitLab
From 43e6dbc9596bd33e98c38c202a0f8a6c7745db36 Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Tue, 13 Apr 2021 17:23:46 +0300
Subject: [PATCH 04/25] datasetlib removal
---
morphemic-performance-model/ml_code/datasetlib | 1 -
1 file changed, 1 deletion(-)
delete mode 160000 morphemic-performance-model/ml_code/datasetlib
diff --git a/morphemic-performance-model/ml_code/datasetlib b/morphemic-performance-model/ml_code/datasetlib
deleted file mode 160000
index c9c6d3c9..00000000
--- a/morphemic-performance-model/ml_code/datasetlib
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit c9c6d3c954b57f9dd3b5109514bd033da00c95db
--
GitLab
From db39cc3523afd370b77bb104ed44a7bd0ae9582a Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Tue, 13 Apr 2021 17:25:35 +0300
Subject: [PATCH 05/25] datasetlib as folder
---
.../ml_code/datasetlib/CHANGES.txt | 0
.../Dataset_Maker.egg-info/PKG-INFO | 79 +++++++++
.../Dataset_Maker.egg-info/SOURCES.txt | 9 +
.../dependency_links.txt | 1 +
.../Dataset_Maker.egg-info/requires.txt | 2 +
.../Dataset_Maker.egg-info/top_level.txt | 1 +
.../ml_code/datasetlib/LICENCE.txt | 22 +++
.../ml_code/datasetlib/README.txt | 71 ++++++++
.../datasetlib/datasetmaker.egg-info/PKG-INFO | 79 +++++++++
.../datasetmaker.egg-info/SOURCES.txt | 9 +
.../dependency_links.txt | 1 +
.../datasetmaker.egg-info/requires.txt | 2 +
.../datasetmaker.egg-info/top_level.txt | 1 +
.../ml_code/datasetlib/morphemic/__init__.py | 0
.../__pycache__/__init__.cpython-36.pyc | Bin 0 -> 152 bytes
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 156 bytes
.../datasetlib/morphemic/dataset/__init__.py | 154 ++++++++++++++++++
.../__pycache__/__init__.cpython-36.pyc | Bin 0 -> 5750 bytes
.../__pycache__/__init__.cpython-37.pyc | Bin 0 -> 5630 bytes
.../ml_code/datasetlib/setup.py | 18 ++
20 files changed, 449 insertions(+)
create mode 100644 morphemic-performance-model/ml_code/datasetlib/CHANGES.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO
create mode 100644 morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/LICENCE.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/README.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO
create mode 100644 morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/__init__.py
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-37.pyc
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc
create mode 100644 morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc
create mode 100644 morphemic-performance-model/ml_code/datasetlib/setup.py
diff --git a/morphemic-performance-model/ml_code/datasetlib/CHANGES.txt b/morphemic-performance-model/ml_code/datasetlib/CHANGES.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO
new file mode 100644
index 00000000..bdce7448
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO
@@ -0,0 +1,79 @@
+Metadata-Version: 1.0
+Name: Dataset-Maker
+Version: 0.0.1
+Summary: Python package for creating a dataset using InfluxDB data points
+Home-page: http://git.dac.ds.unipi.gr/morphemic/datasetmaker
+Author: Jean-Didier Totow
+Author-email: totow@unipi.gr
+License: LICENSE.txt
+Description: 1. Generality
+
+ Dataset maker is morphemic python library for
+ building dataset from data points registered into InfluxDB.
+ Dataset maker receives the name of an application, the start time
+ and the tolerance interval. More details are provided below.
+
+ 2. InfluxDB format
+
+ Data points in InfluxDB should have the following format for being used
+ correctly by the dataset maker:
+
+ measurement : "application_name" #mandatory
+ timestamp : timestamp #optional
+ fields : dictionnary containing metric exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+ tags : dictionnary of metrics related information
+
+ The JSON describing the above information is the following:
+
+ Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+ If data points are presented as the above format, the dataset maker will output
+ a csv (application_name.csv) file with the following schema:
+ time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+ 3. Usage
+
+
+ Warming : make sure the above variables exist before importing dataset make library
+
+ from morphemic.dataset import DatasetMaker
+
+ data_maker = DatasetMaker(application, start, configs)
+ response = data_maker.make()
+
+ application, string containing the application name
+ start, when to start building the dataset
+ Ex.: '10m' , build dataset containg data point stored the 10 last minute
+ Ex.: '3h', three hours
+ Ex.: '4d', four days
+ leave empty or set to None if you wish all data points stored in your InfluxDB
+ configs is dictionnary containg parameters
+
+ {
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+ }
+
+ the response contains
+ {'status': True,'url': url, 'application': application_name, 'features': features}
+
+ or if an error occured
+ {'status': False,'message': "reason of the error"}
+Platform: UNKNOWN
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt
new file mode 100644
index 00000000..80dd9ca2
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt
@@ -0,0 +1,9 @@
+README.txt
+setup.py
+Dataset_Maker.egg-info/PKG-INFO
+Dataset_Maker.egg-info/SOURCES.txt
+Dataset_Maker.egg-info/dependency_links.txt
+Dataset_Maker.egg-info/requires.txt
+Dataset_Maker.egg-info/top_level.txt
+morphemic/__init__.py
+morphemic/dataset/__init__.py
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt
new file mode 100644
index 00000000..e20f7f05
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt
@@ -0,0 +1,2 @@
+pandas
+influxdb
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt
new file mode 100644
index 00000000..047ceb83
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt
@@ -0,0 +1 @@
+morphemic
diff --git a/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt b/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt
new file mode 100644
index 00000000..2d70f41f
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2021 unipi.gr
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/README.txt b/morphemic-performance-model/ml_code/datasetlib/README.txt
new file mode 100644
index 00000000..3f08da58
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/README.txt
@@ -0,0 +1,71 @@
+1. Generality
+
+Dataset maker is a morphemic Python library for
+building dataset from data points registered into InfluxDB.
+Dataset maker receives the name of an application, the start time
+and the tolerance interval. More details are provided below.
+
+2. InfluxDB format
+
+Data points in InfluxDB should have the following format for being used
+correctly by the dataset maker:
+
+measurement : "application_name" #mandatory
+timestamp : timestamp #optional
+fields : dictionary containing metrics exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+tags : dictionary of metric-related information
+
+The JSON describing the above information is the following:
+
+Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+If data points are presented as the above format, the dataset maker will output
+a csv (application_name.csv) file with the following schema:
+time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+3. Usage
+
+
+Warning : make sure the above variables exist before importing the dataset maker library
+
+from morphemic.dataset import DatasetMaker
+
+data_maker = DatasetMaker(application, start, configs)
+response = data_maker.make()
+
+application, string containing the application name
+start, when to start building the dataset
+Ex.: '10m' , build dataset containing data points stored during the last 10 minutes
+Ex.: '3h', three hours
+Ex.: '4d', four days
+leave empty or set to None if you wish all data points stored in your InfluxDB
+configs is a dictionary containing parameters
+
+{
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+}
+
+the response contains
+{'status': True,'url': url, 'application': application_name, 'features': features}
+
+or if an error occurred
+
+{'status': False,'message': "reason of the error"}
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO
new file mode 100644
index 00000000..384bb8a3
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO
@@ -0,0 +1,79 @@
+Metadata-Version: 1.0
+Name: datasetmaker
+Version: 0.0.1
+Summary: Python package for creating a dataset using InfluxDB data points
+Home-page: http://git.dac.ds.unipi.gr/morphemic/datasetmaker
+Author: Jean-Didier Totow
+Author-email: totow@unipi.gr
+License: LICENSE.txt
+Description: 1. Generality
+
+ Dataset maker is morphemic python library for
+ building dataset from data points registered into InfluxDB.
+ Dataset maker receives the name of an application, the start time
+ and the tolerance interval. More details are provided below.
+
+ 2. InfluxDB format
+
+ Data points in InfluxDB should have the following format for being used
+ correctly by the dataset maker:
+
+ measurement : "application_name" #mandatory
+ timestamp : timestamp #optional
+ fields : dictionnary containing metric exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+ tags : dictionnary of metrics related information
+
+ The JSON describing the above information is the following:
+
+ Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+ If data points are presented as the above format, the dataset maker will output
+ a csv (application_name.csv) file with the following schema:
+ time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+ 3. Usage
+
+
+ Warming : make sure the above variables exist before importing dataset make library
+
+ from morphemic.dataset import DatasetMaker
+
+ data_maker = DatasetMaker(application, start, configs)
+ response = data_maker.make()
+
+ application, string containing the application name
+ start, when to start building the dataset
+ Ex.: '10m' , build dataset containg data point stored the 10 last minute
+ Ex.: '3h', three hours
+ Ex.: '4d', four days
+ leave empty or set to None if you wish all data points stored in your InfluxDB
+ configs is dictionnary containg parameters
+
+ {
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+ }
+
+ the response contains
+ {'status': True,'url': url, 'application': application_name, 'features': features}
+
+ or if an error occured
+ {'status': False,'message': "reason of the error"}
+Platform: UNKNOWN
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt
new file mode 100644
index 00000000..3d8cbf44
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt
@@ -0,0 +1,9 @@
+README.txt
+setup.py
+datasetmaker.egg-info/PKG-INFO
+datasetmaker.egg-info/SOURCES.txt
+datasetmaker.egg-info/dependency_links.txt
+datasetmaker.egg-info/requires.txt
+datasetmaker.egg-info/top_level.txt
+morphemic/__init__.py
+morphemic/dataset/__init__.py
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt
new file mode 100644
index 00000000..8b137891
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt
new file mode 100644
index 00000000..e20f7f05
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt
@@ -0,0 +1,2 @@
+pandas
+influxdb
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt
new file mode 100644
index 00000000..047ceb83
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt
@@ -0,0 +1 @@
+morphemic
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/__init__.py b/morphemic-performance-model/ml_code/datasetlib/morphemic/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b4a41f6352cbab9846bba6c984fe6bbaef058d5e
GIT binary patch
literal 152
zcmXr!<>flbqME<}1dl-k3@`#24nSPY0whuxf*CX!{Z=v*frJsnFIWAH{M=OitklFj
z-IUCf%+wg`kf|D$&2_X70h=2h`Aj1KOi&=m~3PUi1CZpd2KczG$)edBQF%UBV09DW?egFUf
literal 0
HcmV?d00001
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py
new file mode 100644
index 00000000..db2c96f2
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py
@@ -0,0 +1,154 @@
+import os, json, time
+from influxdb import InfluxDBClient
+import pandas as pd
+from datetime import datetime
+
+url_path_dataset = None
+
+class Row():
+ def __init__(self, features,metricsname):
+ self.features = features
+ if "time" in self.features:
+ time_str = self.features["time"]
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ self.features["time"] = int(_obj.timestamp())
+ if 'application' in metricsname:
+ metricsname.remove('application')
+ for field_name in metricsname:
+ if not field_name in self.features:
+ self.features[field_name] = None
+
+ def getTime(self):
+ if "time" in self.features:
+ return self.features["time"]
+ if "timestamp" in self.features:
+ return self.features["timestamp"]
+ return None
+
+ def makeCsvRow(self):
+ if "application" in self.features:
+ del self.features["application"]
+ result = ''
+ for key, _value in self.features.items():
+ result += "{0},".format(_value)
+ return result[:-1] + "\n"
+
+class Dataset():
+ def __init__(self):
+ self.rows = {}
+ self.size = 0
+ def addRow(self,row):
+ self.rows[row.getTime()] = row
+ self.size +=1
+ def reset(self):
+ self.rows = {}
+ self.size = 0
+ print("Dataset reset")
+ def getSize(self):
+ return self.size
+ def sortRows(self):
+ return sorted(list(self.rows.values()), key=lambda x: x.getTime(), reverse=True)
+ def getRows(self):
+ return list(self.rows.values())
+ def getRow(self,_time, tolerance):
+ for i in range(tolerance):
+ if int(_time + i) in self.rows:
+ return self.rows[int(_time+i)]
+ return None
+ def save(self,metricnames,application_name):
+ if "application" in metricnames:
+ metricnames.remove("application")
+ dataset_content = ''
+ for metric in metricnames:
+ dataset_content += "{0},".format(metric)
+ dataset_content = dataset_content[:-1] + "\n"
+ for row in list(self.rows.values()):
+ dataset_content += row.makeCsvRow()
+ _file = open(url_path_dataset + "{0}.csv".format(application_name),'w')
+ _file.write(dataset_content)
+ _file.close()
+ return url_path_dataset + "{0}.csv".format(application_name)
+
+class DatasetMaker():
+ def __init__(self, application, start, configs):
+ self.application = application
+ self.start_filter = start
+ self.influxdb = InfluxDBClient(host=configs['hostname'], port=configs['port'], username=configs['username'], password=configs['password'], database=configs['dbname'])
+ self.dataset = Dataset()
+ self.tolerance = 5
+ global url_path_dataset
+ url_path_dataset = configs['path_dataset']
+ if url_path_dataset[-1] != "/":
+ url_path_dataset += "/"
+
+ def getIndex(self, columns, name):
+ return columns.index(name)
+
+ def makeRow(self,columns, values):
+ row = {}
+ index = 0
+ for column in columns:
+ row[column] = values[index]
+ index +=1
+ return row
+
+ def prepareResultSet(self, result_set):
+ result = []
+ columns = result_set["series"][0]["columns"]
+ series_values = result_set["series"][0]["values"]
+ index = 0
+ for _values in series_values:
+ row = self.makeRow(columns,_values)
+ result.append(row)
+ return result
+
+ def make(self):
+ try:
+ self.influxdb.ping()
+ except Exception as e:
+ print("Could not establish connexion with InfluxDB, please verify connexion parameters")
+ print(e)
+ return {"message": "Could not establish connexion with InfluxDB, please verify connexion parameters"}
+ if self.getData() == None:
+ return {"message":"No data found"}
+
+ metricnames, _data = self.getData()
+ for _row in _data:
+ row = Row(_row,metricnames)
+ self.dataset.addRow(row)
+
+ print("Rows construction completed")
+ print("{0} rows found".format(self.dataset.getSize()))
+ #self.dataset.sortRows()
+ url = self.dataset.save(metricnames,self.application)
+ features = self.getFeatures(url)
+ if features == None:
+ return {'status': False, 'message': 'An error occured while building dataset'}
+ return {'status': True,'url': url, 'application': self.application, 'features': features}
+
+ def getFeatures(self, url):
+ try:
+ df = pd.read_csv(url)
+ return df.columns.to_list()
+ except Exception as e:
+ print("Cannot extract data feature list")
+ return None
+
+ def extractMeasurement(self, _json):
+ return _json["series"][0]["columns"]
+
+ def getData(self):
+ query = None
+ try:
+ if self.start_filter != None and self.start_filter != "":
+ query = "SELECT * FROM " + self.application +" WHERE time > now() - "+ self.start_filter
+ else:
+ query = "SELECT * FROM " + self.application
+ result_set = self.influxdb.query(query=query)
+ series = self.extractMeasurement(result_set.raw)
+ #self.influxdb.close() #closing connexion
+ return [series, self.prepareResultSet(result_set.raw)]
+ except Exception as e:
+ print("Could not collect query data points")
+ print(e)
+ return None
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9894da3b51ac629a70ae88ffcbbca133e8be4732
GIT binary patch
literal 5750
zcmcIo%W@mX6`l7CL6D+pTB7`jCz6;5jzv0Ur<8Khiflb%Cl>68sz^p+!$b6t90mCWRsPC!5inCo&gBZQe~AvO;5jO`u4r&o_iZ_FDz89
z{ATUTA65+GKgQH$qka>2)H80F9qgHu}^^AJWREr<=b~*<~w?DYm
zN#b6Po9!xpx1@Pq`y!_yT$)(Dh+KRs;m
zMP3eWZub`MY!k)M92!jsKQ!9LV^98?Phnm(MpkaNt)V5X6KiDW_RwxSC&r1MC(KQ#
z03WyO%At&dm)1VJw${D2CLXMP`0m=hch|Po*LE5;R|Pw9lpn}AlU!8=SuXp0L)l3$
zS97dpdDQLK9OYzjXGeN^8vC1=tDI)K-TtQ`7tsCvy^X!J8*jAZsCP{yB8lb3r!sBF
ztvuW4rn0{mcazoz6c=T2-buE9+_}+gCcPwYHrM;dDi}BB@bRpI!Z1Cvf?w4fRH3!?
z@r0JiL0o)Nm0EpoqB2q3%2;t*z)7jhtGds+2vwEpk}lqnT(4&8t>VFj_5O-t5l!Xq
z#`y#2Uf}%M_1Ttnwnnh+
z&=fW-ZJ(Iejgh&3=^-#70NuYraN%B{o&CC+kNW*i(u(pV?I~m6{N=6Dn@_0@Wmca4
zfU9N~wsVs_?qF)r0N3P8Q~Y0fgkv~uZ8C1nm8Zz`@fw}1aLx6%!vw~zL*zPJOi;H7V(@D=fo18
z74ZV}yR7`%1!^Y-ok1FfPmHmnHenvTWYr}n)hy-6^e}_}B!l?bc+x5eF*)N#iSofx
zx!@_r#}vUQomE3AM1>U5MoHLgLJG%dgLAIK5t?AdLXsCT4+!A&Le)QIhgBHghJ0r<
zuDnPHnDa8Gl|n7Tz+<6`!DlOD6|YPyBPU0hBxQ(V7fKl{lwS&gyEq?9Mxm73m*Adf
zD@wU~{?8~Cp+HqZS>vy*>(LppXacP%PN$wAgb7NQS^>}Z?kyPW4ZM{thFZYFq??6u%bV@YxhF&{h+%XN*=1ve@r{}4(tg}iO9!&k>__=tQxvSL
z+(U`5rQB901#ZH)!#
zO43{9I*YeiyvO2nbzRe;?sW|C&PV_VbvAxZVCkPLInu&Cx#(XvacBRBpf}#;J;Z(4
z(xC|}@kx+suii$bNKuD?Skna1Y^J9uk)9Bmx^#5uB4P2qaWT)%k*wm092ljCdBH^^
z@&T`T%&9!Ku|Qb
z3F+kfEIvS?bM|N($>&qjgQ`wLOau3ObEAN2pmD9L$bDr_q~x)bRJ95ceG(Ac+7!w!
zK_-96Rj#8b$)Wgwt3RN8KyWA7&87^lZB3lq#;t8##ImcvmuCe`abGG?rqzF%2aIWs
zPl}}Nz$BFfLH8{qoN|+%h>s9kIY=2g2i+d>5US{mBq{(4eZ=XXVD6YV&C?>;zmQ%#m{xYHnHu%UDBg`k{-R(Q#@SyaLqW(g7bqx$4p0LZ;~^=7WHivO
zD(k2zuMq#N+&XC`DHBphdu){&T$#~eIdyW@BA-v>{}hc~oT`vp1HJ?=p{uj+
zaT_}sVGkqs=+e*ui@pqoWa4EHJ)FV(p#}Ez$(H-;*rA0Tx^=|ZSSC1LG18xI48eE{
z9mul!AUshE3L905EWC-7NRoi`3yp>16Gc*<73%QEtxKGC$|Ho8TI$QVAIbQ>&gr+}
zyn)^lV-8qssnifMy4PkjboRYTB&Vv#>>6wTM&)>sTW7{;8V`-$JC2cCk1O&66Gs%&
z7_8<10#$*7X&JJFnE;sr<6oJNjGpy~xd2!|7>_OaNLa$=c+poJd;lXGYF-^Vxy$ed
zI2moP?GIh8Hfm5*Nzj*o=!FMo}
zm>Y6@9wU|S92joCKKSHTde9MJFU><9fwz$(?S=5;UVMZjR(P1?d*S$Bfj7f`CkC&C
zh;7Nv@zgY!0p14o$dr#Tl|{SpV4XSVJ?M$SD@06+I+yQ(y0bd*Nt>$i-
zX(1N`WbLo)05eXYJwfs7;3L=vPuS^0U|)VcQO
zkVNZv94)92Wr-d6B}ZocSvJu(#9b1cg?(4FRtBpxJD)PaL;;KtlPEz=%n}^1?$qT2
z^iMIA=Hl{f=r-FK@GvWpT8@j2-vjCa!Yxh;Fqk;8cKeFzQ5rv^H~QDwPK#PDsZbR9H?-u
zGjX!?`sd}ff8a8VUWHWKtP!DFT)p6w5Lz6a>)gN;?S-^A9C{vkZ4gv~Rg|{mXsxQO
z6#0n;9Fd{9sYtuLe$*3DCR^;bSuocsh>1C|{=l==Rl#QQx6=(q;EbZIl1Z?A$6CCw
Gc;&z9In0v)
literal 0
HcmV?d00001
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd046025c6568187cf364fe9f4aabeebc1785b0b
GIT binary patch
literal 5630
zcmcIoU2hx572Vk{E|(O|D2lCoH48Us%+!%mAdTC?ah=-EM;kM#ZPY2zmRWID(#j;4
znq68FO9lm;KIRV;K_LNs>A&go_N73d{1@`lo-<2Qq@titm9VorGk16H+65gn=+Phq&7cQ$zIgS(#>9?2D?!&?rcK!FpaWqy!GwD&2~HP$633*F*wS-S*J{O
zw2op_g}xiJiH^Vw|9GD(eia=F#rKRyu3F<=is!KfX->zk=rL1|acfpHw`t5)nQfYd
zy+K3ho#E!iE-Qb{l(vz6LVsXg|v>%v2`M@856O8`5~Mlfuuh}f9YJJ{%$j`go8mh
z?u1#K^mAim|M9KK)u;c#UoQNHrT3CMaTfK`+}%!8FU%UY-dgU$o5OCFTaTinmfLYE&`oR;P<(2f<`M{@X|2OcZhX8P8S^Bt_WJOkS
z_vDJK;a-*(WF2>3J_FOP=B3*OUgy4C!de{*#)b|Oh6+4`m&~T35Mx9o2Pq;Y9!2Lv
zNt@s$2&derAzU%4PZvBzxrRPwd8e30QGf@A=N?)j<~p>njXlKsH3+uBNLrJpS`$k>
z5Anj(FW{+R7S6Yz-&tq7p_FjvD-fG1RfB_PN)?mm8)F@>EE*#-rTIjV5TS}y7-h6F
z{ZatjMH#TEHKS6wGf*+%@2>3gRX0eq~{4r@8bZeR-d&5Zq(q
z*f^Q%$AAFWio>&>@ve8n-j)n+H25MeV|00@T$-p1UQ0Kwcau)oO>f|#5Pkq>Givja
ziXKNQjT(0DQaqBm-Hp>sdmwjpn?hPa~&?lza^Ue83qpD#=^iL==93Of_KE&x&I
zB_s!*_tQ}(gnj?vJl|Qlqr(19lv{wM)^tjmEvL;)k(aZi8$qNK>N8M2GWVOfA67@xmiiN_>OrZF6nY);*Q;8k10nO
zcq*SDzb?wafnHNF38)n`MYd6Nv%0{33E5|(lRnPHLBnT)=_LT(Gn`-cxu*TxP6kmw
zuMbtXJqWYiwk!xYcMcRXR_=7V2|!ya7^~8YG8K22K+;xyA?xXE)4DKyx!p
z(*Hxgn{D$hQoicwtb~<#r@A_zxA0I@hGU?sEr2nLB^Fu~B}AoeZQVL3Q{1l{$XS{F
z1T@e_>50~bocFvDmVm4>Wc){#`U9T*YZ%iRqta}7yGfeS*SS3aW98m3jg;Z*pS~jKCHE5@P!se%Fkk)KmxX)olZDZbPXxd1^krG(-Gj{ZQa1oUmkJL!V2N%I;aH}Rm|Tx75-fR<}|
zqAEtuO#!AYf13SgKL)v-O!-uxzU#~A5E+#TFNY4@VXu$sgVs5tN$!Dr
zKBD-qs1A2(oK;{ZG3O_KmN?J~Y~UdVgGe#44+G6e>);l1ti3*VP>$Hr1jpF>HS!WT
zV_E?y0$uCVaEna;9tW5Mbd;)_cxt(Y`e*envB(7HHz6}ELv0kz39J#-N
z9hy4FBeO;-j#6C!s`bRGeEOhq0dH$xU2r=#If8pV%hBTs2
zCc=1V^xv_K%zWgl+X812VT{%@0D+}mhVzOp`A~<(K&Hg}7veMHw?^OmjCGEEg0V-Y
zYDrUClqklEvkrh{!O-gyJ9EbN69GUOd+u>*>}WH0k*rWo73)X`DT=d5PbYd!-SbIF
zdlfi{&Z@#y02{|eaccRfsGYLo>!FDq2
z%h8K0TKEdsYS`hC4V@l7h>~nnL-Gt11+?({qu1Z-2VkQ_1xcrav$qTmc9F$`tzq1i
zaeqgDnWeM|@MSojr|Khk>;Z`Bm$>H^5^IsH3I4{4dKb5rJ;1AZ`OaY{8c+lEJ<77$
zB0}^LOWnYb(iu(X?topL!kGH=3S{4({Twg_w20K(oO5&%Z`o~ZqzG#Aj6Q~*1+UF8
z{T#C?8Nd)G%A`6-(L}zZsW-9DtQ->A?2v|qs
z&}$B4%TrM(+bDmEXf9<*TUXi4)!NFlj6sv&_nB?=31?2r0-Y*IaEG|2BxhphzP8I~
zeQECtPMDj3bk0jhFDr^=k^|c9rn-ai1(xFS(^DbJq`ee~Sk@?Cjf#!m1?~aE9;&N9
z;iXyfcx6egZ_CcPg(#dm=x^wdBaU_vjbsK(y``T7pY$&_EJ{(dbMz7<`~GZnqRaR_
z{z77Z1?Ofq-0P4X5cw{j;c^RfZGb^^ZwHYN$9r)6U*UNCRySJ9@Qavc6q}4w99E%`
z{e`^*0JmAxy0!avKE8A7LGVWK{=HA`2BV9?!w>J=yA$v{5ZnNH99(GxZw5HOIs5n_
z*B|-Ku;9-86-Gbq>Zw=4nq`C%J5VV*yxb7RCu6xQdA$ZA&MGKkvFx_
z2OQPTJP$^9G5Z8p%0sV#JYb@ny=eLfGG9~?P(`Sn$(&Ob_^u+CxYGHoQ0dp~60e1t
zTygiSgnb#N>K=zJ
mHmtr1qGI{1KXBf8uZPuQxz4~9H2B^6J
literal 0
HcmV?d00001
diff --git a/morphemic-performance-model/ml_code/datasetlib/setup.py b/morphemic-performance-model/ml_code/datasetlib/setup.py
new file mode 100644
index 00000000..7e0ad6cc
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/setup.py
@@ -0,0 +1,18 @@
+from setuptools import setup
+
+setup(
+ name = 'datasetmaker',
+ version = '0.0.1',
+ author = 'Jean-Didier Totow',
+ author_email = 'totow@unipi.gr',
+ packages = ['morphemic', 'morphemic.dataset'],
+ scripts = [],
+ url='http://git.dac.ds.unipi.gr/morphemic/datasetmaker',
+ license='LICENSE.txt',
+ description='Python package for creating a dataset using InfluxDB data points',
+ long_description=open('README.txt').read(),
+ install_requires=[
+ "pandas",
+ "influxdb",
+ ],
+)
--
GitLab
From 96616628e7570b2f8777407d3f74ec664a158054 Mon Sep 17 00:00:00 2001
From: Maroun Koussaifi
Date: Tue, 13 Apr 2021 16:41:00 +0000
Subject: [PATCH 06/25] Fix RMConnectionHelper Tests
---
.../morphemic/service/RMConnectionHelper.java | 6 ++++--
.../morphemic/service/RMConnectionHelperTest.java | 13 ++++++++-----
2 files changed, 12 insertions(+), 7 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
index 29c7cf38..c667f106 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
@@ -33,10 +33,12 @@ public class RMConnectionHelper {
* @return The initialized RM Interface to be used for sending request to the platform
*/
public static RMRestInterface init(String paURL) {
- if(paURL.contains("trydev2")){
+ if(paURL.contains("trydev2.activeeon")){
sessionPreferencesId = "RM_sessionId_trydev2";
- }else{
+ }else if(paURL.contains("trydev.activeeon")){
sessionPreferencesId = "RM_sessionId";
+ }else{
+ sessionPreferencesId = "TESTING_PREF";
}
// Initialize the client
rmRestInterface = new RMRestClient(paURL + RESOURCE_MANAGER_REST_PATH, null).getRm();
diff --git a/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java b/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
index 65329284..57cea96e 100644
--- a/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
+++ b/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
@@ -14,6 +14,7 @@ import org.ow2.proactive_grid_cloud_portal.common.RMRestInterface;
import javax.security.auth.login.LoginException;
import java.security.KeyException;
+import java.util.prefs.Preferences;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
@@ -27,6 +28,8 @@ class RMConnectionHelperTest {
private final String DUMMY_SESSION_ID = "SESSION_ID";
+ private final Preferences userPreferences = Preferences.userRoot().node("USER_PREFERENCES");
+
@Mock
RMRestInterface rmRestInterface;
@@ -58,10 +61,7 @@ class RMConnectionHelperTest {
// Inject the current mocked RM Interface to the RMConnectionHelper class
RMConnectionHelper.setRmRestInterface(rmRestInterface);
- // Initialize a testing user preference variable
- // It is used to store the session
- String DUMMY_PREFERENCE_ID = "TESTING_PREF";
- RMConnectionHelper.setSessionPreferencesId(DUMMY_PREFERENCE_ID);
+ userPreferences.remove("TESTING_PREF");
// Temporary disable all Logging from the RMConnectionHelper class
// It is enabled after the tests are completed
@@ -79,7 +79,7 @@ class RMConnectionHelperTest {
RMRestInterface initRMRestInterface = RMConnectionHelper.init(DUMMY_CONNECTION_URL);
String DUMMY_RM_INTERFACE_MESSAGE = "org.ow2.proactive_grid_cloud_portal.common.RMRestInterface";
assertTrue(initRMRestInterface.toString().contains(DUMMY_RM_INTERFACE_MESSAGE));
- assertEquals(RMConnectionHelper.getSessionId(), "");
+ assertEquals("", RMConnectionHelper.getSessionId());
}
/**
@@ -94,6 +94,7 @@ class RMConnectionHelperTest {
*/
@Test
void connect() throws LoginException, KeyException, RMException {
+ RMConnectionHelper.setSessionPreferencesId("TESTING_PREF");
RMConnectionHelper.connect(DUMMY_USERNAME,DUMMY_PASSWORD);
assertEquals(DUMMY_SESSION_ID,RMConnectionHelper.getSessionId());
}
@@ -104,6 +105,7 @@ class RMConnectionHelperTest {
*/
@Test
void disconnect() {
+ RMConnectionHelper.setSessionPreferencesId("TESTING_PREF");
RMConnectionHelper.disconnect();
assertEquals("", RMConnectionHelper.getSessionId());
}
@@ -124,5 +126,6 @@ class RMConnectionHelperTest {
@AfterEach
void enableLogging() {
Logger.getLogger(RMConnectionHelper.class).setLevel(Level.ALL);
+ userPreferences.remove("TESTING_PREF");
}
}
\ No newline at end of file
--
GitLab
From ee0c185ffa05d63b1a10db581bdbacfab9c3310e Mon Sep 17 00:00:00 2001
From: mklkun
Date: Wed, 14 Apr 2021 03:48:07 +0200
Subject: [PATCH 07/25] Fix clouds removal from SAL
---
.../java/org/activeeon/morphemic/PAGateway.java | 8 ++++++--
.../activeeon/morphemic/model/Deployment.java | 6 ++++++
.../org/activeeon/morphemic/model/PACloud.java | 16 +++++++++++++++-
.../java/org/activeeon/morphemic/model/Task.java | 8 ++++++--
.../morphemic/service/EntityManagerHelper.java | 8 +++++++-
5 files changed, 40 insertions(+), 6 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 17b758f2..9f2a3ddc 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -427,15 +427,17 @@ public class PAGateway {
LOGGER.info("Node source defined.");
+ LOGGER.info("Trying to retrieve task: " + node.optString("taskName"));
+ Task task = EntityManagerHelper.find(Job.class, jobId).findTask(node.optString("taskName"));
+
newDeployment.setPaCloud(cloud);
+ newDeployment.setTask(task);
EntityManagerHelper.persist(newDeployment);
LOGGER.debug("Deployment created: " + newDeployment.toString());
EntityManagerHelper.persist(cloud);
LOGGER.info("Deployment added to the related cloud: " + cloud.toString());
- LOGGER.info("Trying to retrieve task: " + node.optString("taskName"));
- Task task = EntityManagerHelper.find(Job.class, jobId).findTask(node.optString("taskName"));
task.addDeployment(newDeployment);
EntityManagerHelper.persist(task);
});
@@ -491,6 +493,8 @@ public class PAGateway {
LOGGER.error(e.getStackTrace());
}
}
+ cloud.getDeployments().forEach(deployment -> deployment.getTask().removeDeployment(deployment));
+ cloud.clearDeployments();
EntityManagerHelper.remove(cloud);
});
EntityManagerHelper.commit();
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
index 48dea298..a486521c 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
@@ -35,6 +35,9 @@ public class Deployment implements Serializable {
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.REFRESH)
private PACloud paCloud;
+ @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.REFRESH)
+ private Task task;
+
@Column(name = "IS_DEPLOYED")
private Boolean isDeployed = false;
@@ -45,6 +48,9 @@ public class Deployment implements Serializable {
", locationName='" + locationName + '\'' +
", imageProviderId='" + imageProviderId + '\'' +
", hardwareProviderId='" + hardwareProviderId + '\'' +
+ ", isDeployed='" + isDeployed.toString() + '\'' +
+ ", paCloud='" + paCloud.getNodeSourceNamePrefix() + '\'' +
+ ", task='" + task.getName() + '\'' +
'}';
}
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
index 09c986c9..76b4bda1 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
@@ -11,6 +11,7 @@ import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
@AllArgsConstructor
@NoArgsConstructor
@@ -61,7 +62,7 @@ public class PACloud implements Serializable {
@ElementCollection(targetClass=String.class)
private Map deployedRegions;
- @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
+ @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.ALL)
private List deployments;
@OneToOne
@@ -74,6 +75,14 @@ public class PACloud implements Serializable {
deployments.add(deployment);
}
+ public void removeDeployment(Deployment deployment) {
+ deployments.remove(deployment);
+ }
+
+ public void clearDeployments() {
+ deployments.clear();
+ }
+
public void addDeployedRegion(String region, String imageProviderId) {
if (deployedRegions==null){
deployedRegions = new HashMap<>();
@@ -87,6 +96,10 @@ public class PACloud implements Serializable {
@Override
public String toString() {
+ String deploymentsPrint = deployments == null ? "[]" : deployments.stream()
+ .map(Deployment::getNodeName)
+ .collect(Collectors.toList())
+ .toString();
return "PACloud{" +
"cloudID='" + cloudID + '\'' +
", nodeSourceNamePrefix='" + nodeSourceNamePrefix + '\'' +
@@ -94,6 +107,7 @@ public class PACloud implements Serializable {
", securityGroup='" + securityGroup + '\'' +
", dummyInfrastructureName='" + dummyInfrastructureName + '\'' +
", deployedRegions=" + deployedRegions +
+ ", deployments='" + deploymentsPrint + '\'' +
'}';
}
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
index 66dfccf7..cdbda4ed 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
@@ -10,7 +10,7 @@ import java.util.LinkedList;
import java.util.List;
-@AllArgsConstructor
+//@AllArgsConstructor
@NoArgsConstructor
@ToString
@Getter
@@ -34,7 +34,7 @@ public class Task implements Serializable {
@Embedded
private DockerEnvironment environment;
- @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
+ @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.ALL)
private List deployments;
@OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
@@ -66,6 +66,10 @@ public class Task implements Serializable {
deployments.add(deployment);
}
+ public void removeDeployment(Deployment deployment) {
+ deployments.remove(deployment);
+ }
+
public void addSubmittedTaskName(String submittedTaskName) {
if (submittedTaskNames==null){
submittedTaskNames = new LinkedList<>();
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
index 5bad8175..7f2b43cc 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
@@ -2,6 +2,8 @@ package org.activeeon.morphemic.service;
import org.hibernate.CacheMode;
import org.hibernate.jpa.QueryHints;
+import java.util.HashMap;
+import java.util.Map;
import javax.persistence.*;
@@ -12,7 +14,11 @@ public class EntityManagerHelper {
private static final ThreadLocal threadLocal;
static {
- emf = Persistence.createEntityManagerFactory("model");
+ Map properties = new HashMap<>();
+ properties.put("javax.persistence.jdbc.url", "jdbc:mariadb://localhost:3306/proactive");
+ properties.put("javax.persistence.jdbc.user", "root");
+ properties.put("javax.persistence.jdbc.password", "admin");
+ emf = Persistence.createEntityManagerFactory("model", properties);
threadLocal = new ThreadLocal();
}
--
GitLab
From d2e80fca725d79798dc9bed32446bf972a1a4abc Mon Sep 17 00:00:00 2001
From: mklkun
Date: Wed, 14 Apr 2021 03:49:54 +0200
Subject: [PATCH 08/25] Cancel useless changes
---
.../activeeon/morphemic/service/EntityManagerHelper.java | 8 +-------
1 file changed, 1 insertion(+), 7 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
index 7f2b43cc..5bad8175 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
@@ -2,8 +2,6 @@ package org.activeeon.morphemic.service;
import org.hibernate.CacheMode;
import org.hibernate.jpa.QueryHints;
-import java.util.HashMap;
-import java.util.Map;
import javax.persistence.*;
@@ -14,11 +12,7 @@ public class EntityManagerHelper {
private static final ThreadLocal threadLocal;
static {
- Map properties = new HashMap<>();
- properties.put("javax.persistence.jdbc.url", "jdbc:mariadb://localhost:3306/proactive");
- properties.put("javax.persistence.jdbc.user", "root");
- properties.put("javax.persistence.jdbc.password", "admin");
- emf = Persistence.createEntityManagerFactory("model", properties);
+ emf = Persistence.createEntityManagerFactory("model");
threadLocal = new ThreadLocal();
}
--
GitLab
From 23c8f9ca4e7b17b4c32e2760b4e051b3d1f99055 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Wed, 14 Apr 2021 17:54:55 +0200
Subject: [PATCH 09/25] Add scaling in and out endpoints and some improvements
---
.../org/activeeon/morphemic/PAGateway.java | 418 ++++++++++++++++--
.../activeeon/morphemic/model/Deployment.java | 8 +
.../org/activeeon/morphemic/model/Task.java | 5 +-
.../resources/collect_ip_addr_results.groovy | 18 +
.../post_prepare_infra_script.groovy | 1 +
5 files changed, 408 insertions(+), 42 deletions(-)
create mode 100644 scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 9f2a3ddc..3aa2d25e 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -45,6 +45,10 @@ public class PAGateway {
public PAConnectorIaasGateway connectorIaasGateway;
+ final String NEW_LINE = System.getProperty("line.separator");
+
+ final String SCRIPTS_SEPARATION = NEW_LINE + NEW_LINE + "# Main script" + NEW_LINE;
+
private static final Logger LOGGER = Logger.getLogger(PAGateway.class);
/**
@@ -424,7 +428,6 @@ public class PAGateway {
cloud.addDeployedRegion(newDeployment.getLocationName(),
newDeployment.getLocationName() + "/" + newDeployment.getImageProviderId());
}
-
LOGGER.info("Node source defined.");
LOGGER.info("Trying to retrieve task: " + node.optString("taskName"));
@@ -432,6 +435,7 @@ public class PAGateway {
newDeployment.setPaCloud(cloud);
newDeployment.setTask(task);
+ newDeployment.setNumber(task.getNextDeploymentID());
EntityManagerHelper.persist(newDeployment);
LOGGER.debug("Deployment created: " + newDeployment.toString());
@@ -486,16 +490,24 @@ public class PAGateway {
EntityManagerHelper.begin();
cloudIDs.forEach(cloudID -> {
PACloud cloud = EntityManagerHelper.find(PACloud.class, cloudID);
+ LOGGER.info("Removing cloud : " + cloud.toString());
for (Map.Entry entry : cloud.getDeployedRegions().entrySet()) {
try {
- resourceManagerGateway.removeNodeSource(cloud.getNodeSourceNamePrefix() + entry.getKey(), preempt);
+ String nodeSourceName = cloud.getNodeSourceNamePrefix() + entry.getKey();
+ LOGGER.info("Removing node source " + nodeSourceName + " from the ProActive server.");
+ resourceManagerGateway.removeNodeSource(nodeSourceName, preempt);
} catch (NotConnectedException | PermissionRestException e) {
LOGGER.error(e.getStackTrace());
}
}
- cloud.getDeployments().forEach(deployment -> deployment.getTask().removeDeployment(deployment));
+ if (cloud.getDeployments() != null) {
+ LOGGER.info("Cleaning deployments from related tasks " + cloud.getDeployments().toString());
+ cloud.getDeployments().forEach(deployment -> deployment.getTask().removeDeployment(deployment));
+ }
+ LOGGER.info("Cleaning deployments from the cloud entry");
cloud.clearDeployments();
EntityManagerHelper.remove(cloud);
+ LOGGER.info("Cloud removed.");
});
EntityManagerHelper.commit();
}
@@ -516,7 +528,7 @@ public class PAGateway {
public void removeNodes(List nodeNames, Boolean preempt) {
nodeNames.forEach(nodeName -> {
try {
- String nodeUrl = resourceManagerGateway.searchNodes(nodeNames, true).get(0);
+ String nodeUrl = resourceManagerGateway.searchNodes(Collections.singletonList(nodeName), true).get(0);
resourceManagerGateway.removeNode(nodeUrl, preempt);
LOGGER.info("Node " + nodeName + " with URL: " + nodeUrl + " has been removed successfully.");
} catch (NotConnectedException | RestException e) {
@@ -620,8 +632,6 @@ public class PAGateway {
}
private List createCommandsTask(Task task, String taskNameSuffix, String taskToken, Job job) {
- final String newLine = System.getProperty("line.separator");
- final String scriptsSeparation = newLine + newLine + "# Main script" + newLine;
List scriptTasks = new LinkedList<>();
ScriptTask scriptTaskStart = null;
ScriptTask scriptTaskInstall = null;
@@ -638,7 +648,7 @@ public class PAGateway {
task.getInstallation().getPostInstall().isEmpty())) {
if (!task.getInstallation().getInstall().isEmpty()) {
scriptTaskInstall = PAFactory.createBashScriptTask(task.getName() + "_install" + taskNameSuffix,
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getInstall());
} else {
scriptTaskInstall = PAFactory.createBashScriptTask(task.getName() + "_install" + taskNameSuffix,
@@ -647,13 +657,13 @@ public class PAGateway {
if (!task.getInstallation().getPreInstall().isEmpty()) {
scriptTaskInstall.setPreScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPreInstall(),
"bash"));
}
if (!task.getInstallation().getPostInstall().isEmpty()) {
scriptTaskInstall.setPostScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPostInstall(),
"bash"));
}
@@ -669,7 +679,7 @@ public class PAGateway {
task.getInstallation().getPostStart().isEmpty())) {
if (!task.getInstallation().getStart().isEmpty()) {
scriptTaskStart = PAFactory.createBashScriptTask(task.getName() + "_start" + taskNameSuffix,
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getStart());
} else {
scriptTaskStart = PAFactory.createBashScriptTask(task.getName() + "_start" + taskNameSuffix,
@@ -678,13 +688,13 @@ public class PAGateway {
if (!task.getInstallation().getPreStart().isEmpty()) {
scriptTaskStart.setPreScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPreStart(),
"bash"));
}
if (!task.getInstallation().getPostStart().isEmpty()) {
scriptTaskStart.setPostScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPostStart(),
"bash"));
}
@@ -759,7 +769,7 @@ public class PAGateway {
return 1;
}
// Let's find the task:
- Optional optTask = Optional.ofNullable(EntityManagerHelper.find(Task.class,optJob.get().findTask(taskName)));
+ Optional optTask = Optional.ofNullable(optJob.get().findTask(taskName));
if (!optTask.isPresent()) {
LOGGER.error(String.format("Task [%s] not found", taskName));
return 1;
@@ -772,30 +782,270 @@ public class PAGateway {
return 2;
}
- // Let's clone the deployment/node as needed.
+ // Saving suffix IDs of new nodes
+ List newNodesNumbers = new LinkedList<>();
+
+ // Let's clone the deployment/node as needed
Deployment oldDeployment = optDeployment.get();
nodeNames.stream().map(nodeName -> {
- Deployment newDeployment = new Deployment();
- newDeployment.setPaCloud(oldDeployment.getPaCloud());
- newDeployment.setNodeName(nodeName);
- newDeployment.setLocationName(oldDeployment.getLocationName());
- newDeployment.setIsDeployed(false);
- newDeployment.setImageProviderId(oldDeployment.getImageProviderId());
- newDeployment.setHardwareProviderId(oldDeployment.getHardwareProviderId());
- EmsDeploymentRequest newEmsDeploymentReq = oldDeployment.getEmsDeployment().clone(nodeName);
- newDeployment.setEmsDeployment(newEmsDeploymentReq);
- return newDeployment;
- }).forEach( deployment -> {
- optTask.get().addDeployment(deployment);
- EntityManagerHelper.persist(deployment.getEmsDeployment());
- EntityManagerHelper.persist(deployment);
- EntityManagerHelper.persist(optTask.get());
+ EmsDeploymentRequest newEmsDeploymentReq =
+ oldDeployment.getEmsDeployment() == null ? null : oldDeployment.getEmsDeployment().clone(nodeName);
+ return new Deployment(nodeName,
+ oldDeployment.getLocationName(),
+ oldDeployment.getImageProviderId(),
+ oldDeployment.getHardwareProviderId(),
+ newEmsDeploymentReq,
+ oldDeployment.getPaCloud(),
+ oldDeployment.getTask(),
+ false,
+ null,
+ null
+ );
+ })
+ .forEach(deployment -> {
+ // Persist new deployment data
+ deployment.setNumber(optTask.get().getNextDeploymentID());
+ newNodesNumbers.add(optTask.get().getNextDeploymentID());
+ optTask.get().addDeployment(deployment);
+ if (deployment.getEmsDeployment() != null) {
+ EntityManagerHelper.persist(deployment.getEmsDeployment());
+ }
+ deployment.getPaCloud().addDeployment(deployment);
+ EntityManagerHelper.persist(deployment);
+ EntityManagerHelper.persist(optTask.get());
+ EntityManagerHelper.persist(deployment.getPaCloud());
});
EntityManagerHelper.commit();
+
+ // Let's deploy the VMS
+ submitScalingOutJob(optJob.get(), taskName, newNodesNumbers);
+
return 0;
}
+ private void submitScalingOutJob(Job job, String scaledTaskName, List newNodesNumbers) {
+ EntityManagerHelper.refresh(job);
+ LOGGER.info("Task: " + scaledTaskName + " of job " + job.toString() + " to be scaled out.");
+
+ TaskFlowJob paJob = new TaskFlowJob();
+ paJob.setName(job.getName() + "_" + scaledTaskName + "_ScaleOut");
+ LOGGER.info("Job created: " + paJob.toString());
+
+ EntityManagerHelper.begin();
+
+ job.getTasks().forEach(task -> {
+ List scriptTasks = buildScalingOutPATask(task, job, scaledTaskName);
+
+ if (scriptTasks != null && !scriptTasks.isEmpty()) {
+ scriptTasks.forEach(scriptTask -> {
+ try {
+ paJob.addTask(scriptTask);
+ } catch (UserException e) {
+ LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
+ }
+ });
+ EntityManagerHelper.persist(task);
+ }
+ });
+
+ setAllScalingOutMandatoryDependencies(paJob, job, scaledTaskName, newNodesNumbers);
+
+ paJob.setProjectName("Morphemic");
+
+ long submittedJobId = schedulerGateway.submit(paJob).longValue();
+ job.setSubmittedJobId(submittedJobId);
+
+ EntityManagerHelper.persist(job);
+ EntityManagerHelper.commit();
+ LOGGER.info("Scaling out of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
+ }
+
+ private void setAllScalingOutMandatoryDependencies(TaskFlowJob paJob, Job jobToSubmit, String scaledTaskName, List newNodesNumbers) {
+ jobToSubmit.getTasks().forEach(task -> {
+ if (task.getParentTasks() != null && !task.getParentTasks().isEmpty()) {
+ task.getParentTasks().forEach(parentTaskName -> {
+ paJob.getTasks().forEach(paTask -> {
+ paJob.getTasks().forEach(paParentTask -> {
+ if (paTask.getName().contains(task.getName()) && paParentTask.getName().contains(parentTaskName)) {
+ if (paParentTask.getName().contains(scaledTaskName)) {
+ if (newNodesNumbers.stream().anyMatch(entry -> paParentTask.getName().endsWith(entry.toString()))) {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ } else {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().startsWith("prepareInfra")) {
+ paTask.addDependence(paParentTask);
+ }
+ }
+ } else if (paTask.getName().contains(scaledTaskName)) {
+ if (newNodesNumbers.stream().anyMatch(entry -> paTask.getName().endsWith(entry.toString()))) {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ } else {
+ if (paTask.getName().startsWith("prepareInfra") &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ }
+ }
+ }
+ });
+ });
+ });
+ }
+ });
+ }
+
+ private List buildScalingOutPATask(Task task, Job job, String scaledTaskName) {
+ List scriptTasks = new LinkedList<>();
+ Task scaledTask = job.findTask(scaledTaskName);
+
+ if (scaledTask.getParentTasks().contains(task.getName())) {
+ // When the scaled task is a child the task to be built
+ LOGGER.info("Building task " + task.getName() + " as a parent of task " + scaledTaskName);
+ scriptTasks.addAll(createParentScaledTask(task, job));
+ } else if (scaledTaskName.equals(task.getName())) {
+ // When the scaled task is the task to be built
+ LOGGER.info("Building task " + task.getName() + " as it is scaled out");
+ scriptTasks.addAll(buildScaledPATask(task, job));
+ } else if (task.getParentTasks().contains(scaledTaskName)) {
+ // When the scaled task is a parent of the task to be built
+ LOGGER.info("Building task " + task.getName() + " as a child of task " + scaledTaskName);
+ scriptTasks.addAll(createChildScaledTask(task, job));
+ } else {
+ LOGGER.debug("Task " + task.getName() + " is not impacted by the scaling of task " + scaledTaskName);
+ }
+
+ return scriptTasks;
+ }
+
+ private List createChildScaledTask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createScalingChildUpdateTask(task, suffix, token, job));
+ });
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ return scriptTasks;
+ }
+
+ private ScriptTask createScalingChildUpdateTask(Task task, String suffix, String token, Job job) {
+ ScriptTask scriptTaskUpdate = null;
+
+ Map taskVariablesMap = new HashMap<>();
+ //TODO: Taking into consideration multiple parent tasks with multiple communications
+ taskVariablesMap.put("requestedPortName", new TaskVariable("requestedPortName",
+ job.findTask(task.getParentTasks().get(0)).getPortsToOpen().get(0).getRequestedName()));
+
+ if (!task.getInstallation().getUpdateCmd().isEmpty()) {
+ scriptTaskUpdate = PAFactory.createBashScriptTask(task.getName() + "_update" + suffix,
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
+ task.getInstallation().getUpdateCmd());
+ } else {
+ scriptTaskUpdate = PAFactory.createBashScriptTask(task.getName() + "_install" + suffix,
+ "echo \"Installation script is empty. Nothing to be executed.\"");
+ }
+
+ scriptTaskUpdate.setPreScript(PAFactory.createSimpleScriptFromFIle("collect_ip_addr_results.groovy",
+ "groovy"));
+
+ scriptTaskUpdate.setVariables(taskVariablesMap);
+ scriptTaskUpdate.addGenericInformation("NODE_ACCESS_TOKEN", token);
+
+ return scriptTaskUpdate;
+ }
+
+ private List buildScaledPATask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+
+ // Creating infra preparation task
+ scriptTasks.add(createInfraPreparationTask(task, suffix, token, job));
+ });
+
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+
+ task.getDeployments().stream().filter(deployment -> !deployment.getIsDeployed()).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createInfraTask(task, deployment, suffix, token));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(scriptTasks.size()-1).getName().substring(0, scriptTasks.get(scriptTasks.size()-1).getName().lastIndexOf("_")));
+ // If the infrastructure comes with the deployment of the EMS, we set it up.
+ Optional.ofNullable(deployment.getEmsDeployment()).ifPresent(emsDeploymentRequest -> scriptTasks.add(createEmsDeploymentTask(emsDeploymentRequest,suffix,token)));
+ LOGGER.info("Token added: " + token);
+ deployment.setIsDeployed(true);
+ deployment.setNodeAccessToken(token);
+
+ // Creating application deployment tasks
+ List appTasks = createAppTasks(task, suffix, token, job);
+ task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
+
+ // Creating infra preparation task
+ appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
+ appTasks.get(1).addDependence(appTasks.get(0));
+
+ // Add dependency between infra and application deployment tasks
+ appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
+
+ scriptTasks.addAll(appTasks);
+ });
+
+ scriptTasks.forEach(scriptTask -> task.addSubmittedTaskName(scriptTask.getName()));
+
+ return scriptTasks;
+ }
+
+ private List createParentScaledTask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createScalingParentInfraPreparationTask(task, suffix, token, job));
+ });
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ return scriptTasks;
+ }
+
+ private ScriptTask createScalingParentInfraPreparationTask(Task task, String suffix, String token, Job job) {
+ ScriptTask prepareInfraTask;
+ Map taskVariablesMap = new HashMap<>();
+ String taskName = "parentPrepareInfra_" + task.getName() + suffix;
+
+ if (!task.getPortsToOpen().isEmpty()) {
+ prepareInfraTask = PAFactory.createGroovyScriptTaskFromFile(taskName, "post_prepare_infra_script.groovy");
+ prepareInfraTask.setPreScript(PAFactory.createSimpleScriptFromFIle("prepare_infra_script.sh",
+ "bash"));
+ //TODO: Taking into consideration multiple provided ports
+ taskVariablesMap.put("providedPortName", new TaskVariable("providedPortName",
+ task.getPortsToOpen().get(0).getRequestedName()));
+ taskVariablesMap.put("providedPortValue", new TaskVariable("providedPortValue",
+ task.getPortsToOpen().get(0).getValue().toString()));
+ } else {
+ prepareInfraTask = PAFactory.createBashScriptTask(taskName,
+ "echo \"No ports to open and not parent tasks. Nothing to be prepared in VM.\"");
+ }
+
+ prepareInfraTask.setVariables(taskVariablesMap);
+ prepareInfraTask.addGenericInformation("NODE_ACCESS_TOKEN", token);
+
+ return prepareInfraTask;
+ }
+
/**
* Unregister a set of node as a scale-down operation
* @param nodeNames A list of node to be removed
@@ -816,7 +1066,7 @@ public class PAGateway {
}
// Let's find the task:
- Optional optTask = Optional.ofNullable(EntityManagerHelper.find(Task.class,optJob.get().findTask(taskName)));
+ Optional optTask = Optional.ofNullable(optJob.get().findTask(taskName));
if (!optTask.isPresent()) {
LOGGER.error(String.format("Task [%s] not found", taskName));
return 1;
@@ -829,24 +1079,108 @@ public class PAGateway {
}
// For supplied node, I retrieve their deployment
- List deployments = nodeNames.stream().map(node -> EntityManagerHelper.find(Deployment.class,node)).filter(deployment -> (deployment != null)).collect(Collectors.toList());
+ List deployments = nodeNames.stream().map(node -> EntityManagerHelper.find(Deployment.class,node)).filter(Objects::nonNull).collect(Collectors.toList());
// For deployed node, I flag their removal
- List nodesToBeRemoved = deployments.stream().filter(deployment -> deployment.getIsDeployed()).map(Deployment::getNodeName).collect(Collectors.toList());
+ List nodesToBeRemoved = deployments.stream().filter(Deployment::getIsDeployed).map(Deployment::getNodeName).collect(Collectors.toList());
+ LOGGER.info("Nodes to be removed are : " + nodesToBeRemoved);
// For every node, I remove the deployment entry
deployments.forEach(
deployment -> {
+ deployment.getTask().removeDeployment(deployment);
+ EntityManagerHelper.persist(deployment.getTask());
+ deployment.getPaCloud().removeDeployment(deployment);
+ EntityManagerHelper.persist(deployment.getPaCloud());
EntityManagerHelper.remove(deployment);
- EntityManagerHelper.persist(deployment);
}
);
// I commit the removal of deployed node
- removeNodes(nodesToBeRemoved,false);
+ removeNodes(nodesToBeRemoved,true);
EntityManagerHelper.commit();
+
+ // Let's deploy the VMS
+ submitScalingInJob(optJob.get(), taskName);
+
return 0;
}
+ private void submitScalingInJob(Job job, String scaledTaskName) {
+ EntityManagerHelper.refresh(job);
+ LOGGER.info("Task: " + scaledTaskName + " of job " + job.toString() + " to be scaled in.");
+
+ TaskFlowJob paJob = new TaskFlowJob();
+ paJob.setName(job.getName() + "_" + scaledTaskName + "_ScaleIn");
+ LOGGER.info("Job created: " + paJob.toString());
+
+ EntityManagerHelper.begin();
+
+ job.getTasks().forEach(task -> {
+ List scriptTasks = buildScalingInPATask(task, job, scaledTaskName);
+
+ if (scriptTasks != null && !scriptTasks.isEmpty()) {
+ scriptTasks.forEach(scriptTask -> {
+ try {
+ paJob.addTask(scriptTask);
+ } catch (UserException e) {
+ LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
+ }
+ });
+ EntityManagerHelper.persist(task);
+ }
+ });
+
+ setAllScalingInMandatoryDependencies(paJob, job, scaledTaskName);
+
+ paJob.setProjectName("Morphemic");
+
+ long submittedJobId = schedulerGateway.submit(paJob).longValue();
+ job.setSubmittedJobId(submittedJobId);
+
+ EntityManagerHelper.persist(job);
+ EntityManagerHelper.commit();
+ LOGGER.info("Scaling out of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
+ }
+
+ private void setAllScalingInMandatoryDependencies(TaskFlowJob paJob, Job jobToSubmit, String scaledTaskName) {
+ Task scaledTask = jobToSubmit.findTask(scaledTaskName);
+ jobToSubmit.getTasks().forEach(task -> {
+ if (task.getParentTasks() != null && !task.getParentTasks().isEmpty()) {
+ task.getParentTasks().forEach(parentTaskName -> {
+ paJob.getTasks().forEach(paTask -> {
+ paJob.getTasks().forEach(paParentTask -> {
+ if (paTask.getName().contains(task.getName()) && paParentTask.getName().contains(parentTaskName)) {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ }
+ });
+ });
+ });
+ }
+ });
+ }
+
+ private List buildScalingInPATask(Task task, Job job, String scaledTaskName) {
+ List scriptTasks = new LinkedList<>();
+ Task scaledTask = job.findTask(scaledTaskName);
+
+ if (scaledTaskName.equals(task.getName())) {
+ // When the scaled task is the task to be built
+ LOGGER.info("Building task " + task.getName() + " as it is scaled out");
+ scriptTasks.addAll(buildScaledPATask(task, job));
+ } else if (task.getParentTasks().contains(scaledTaskName)) {
+ // When the scaled task is a parent of the task to be built
+ LOGGER.info("Building task " + task.getName() + " as a child of task " + scaledTaskName);
+ scriptTasks.addAll(createChildScaledTask(task, job));
+ } else {
+ LOGGER.debug("Task " + task.getName() + " is not impacted by the scaling of task " + scaledTaskName);
+ }
+
+ return scriptTasks;
+ }
+
/**
* Translate a Morphemic task skeleton into a list of ProActive tasks
* @param task A Morphemic task skeleton
@@ -855,7 +1189,6 @@ public class PAGateway {
*/
public List buildPATask(Task task, Job job) {
List scriptTasks = new LinkedList<>();
- List tasksTokens = new LinkedList<>();
if (task.getDeployments() == null || task.getDeployments().isEmpty()) {
LOGGER.warn("The task " + task.getName() + " does not have a deployment. It will be scheduled on any free node.");
@@ -864,16 +1197,19 @@ public class PAGateway {
task.setDeploymentLastSubmittedTaskName(scriptTasks.get(scriptTasks.size()-1).getName());
}
else {
- task.getDeployments().forEach(deployment -> {
+ task.getDeployments().stream().filter(deployment -> !deployment.getIsDeployed()).forEach(deployment -> {
// Creating infra deployment tasks
- String token = task.getTaskId() + tasksTokens.size();
- String suffix = "_" + tasksTokens.size();
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
scriptTasks.add(createInfraTask(task, deployment, suffix, token));
// If the infrastructure comes with the deployment of the EMS, we set it up.
Optional.ofNullable(deployment.getEmsDeployment()).ifPresent(emsDeploymentRequest -> scriptTasks.add(createEmsDeploymentTask(emsDeploymentRequest,suffix,token)));
- LOGGER.debug("Token added: " + token);
- tasksTokens.add(token);
+ LOGGER.info("Token added: " + token);
deployment.setIsDeployed(true);
+ deployment.setNodeAccessToken(token);
+
+ LOGGER.info("+++ Deployment number: " + deployment.getNumber());
+
// Creating application deployment tasks
List appTasks = createAppTasks(task, suffix, token, job);
@@ -888,7 +1224,7 @@ public class PAGateway {
scriptTasks.addAll(appTasks);
});
- task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_0")));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
}
scriptTasks.forEach(scriptTask -> task.addSubmittedTaskName(scriptTask.getName()));
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
index a486521c..d19b9f9b 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
@@ -41,6 +41,12 @@ public class Deployment implements Serializable {
@Column(name = "IS_DEPLOYED")
private Boolean isDeployed = false;
+ @Column(name = "NODE_ACCESS_TOKEN")
+ private String nodeAccessToken;
+
+ @Column(name = "NUMBER")
+ private Long number;
+
@Override
public String toString() {
return "Deployment{" +
@@ -49,6 +55,8 @@ public class Deployment implements Serializable {
", imageProviderId='" + imageProviderId + '\'' +
", hardwareProviderId='" + hardwareProviderId + '\'' +
", isDeployed='" + isDeployed.toString() + '\'' +
+ ", nodeAccessToken='" + nodeAccessToken + '\'' +
+ ", number='" + number + '\'' +
", paCloud='" + paCloud.getNodeSourceNamePrefix() + '\'' +
", task='" + task.getName() + '\'' +
'}';
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
index cdbda4ed..cf926f03 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
@@ -10,7 +10,6 @@ import java.util.LinkedList;
import java.util.List;
-//@AllArgsConstructor
@NoArgsConstructor
@ToString
@Getter
@@ -59,11 +58,15 @@ public class Task implements Serializable {
@Column(name = "DEPLOYMENT_LAST_SUBMITTED_TASK_NAME")
private String deploymentLastSubmittedTaskName;
+ @Column(name = "NEXT_DEPLOYMENT_ID")
+ private Long nextDeploymentID = 0L;
+
public void addDeployment(Deployment deployment) {
if (deployments==null){
deployments = new LinkedList<>();
}
deployments.add(deployment);
+ nextDeploymentID++;
}
public void removeDeployment(Deployment deployment) {
diff --git a/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy b/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy
new file mode 100644
index 00000000..7d4e5164
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy
@@ -0,0 +1,18 @@
// Collects every workflow variable whose key contains the requested port name
// (one per deployment) and republishes them as a single comma-separated value
// under the requested port name itself.
def requestedPortName = variables.get("requestedPortName")

def collected = []
variables.each { key, value ->
    if (key.contains(requestedPortName)) {
        collected << value.toString()
    }
}

// Preserve the original behavior of publishing null when nothing matched.
def publicRequestedPort = collected ? collected.join(",") : null

println "publicRequestedPort: " + publicRequestedPort
variables.put(requestedPortName, publicRequestedPort)
\ No newline at end of file
diff --git a/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy b/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
index 71962fa5..b590df29 100644
--- a/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
+++ b/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
@@ -5,5 +5,6 @@ if (providedPortName?.trim()){
def ipAddr = new File(providedPortName+"_ip").text.trim()
def publicProvidedPort = ipAddr + ":" + providedPortValue
variables.put(providedPortName, publicProvidedPort)
+ variables.put(providedPortName + variables.get("PA_TASK_ID"), publicProvidedPort)
println("Provided variable " + providedPortName + "=" + publicProvidedPort)
}
\ No newline at end of file
--
GitLab
From 9f9bbf199235a5339effb36a0dc55388af2f3ac5 Mon Sep 17 00:00:00 2001
From: Fotis Paraskevopoulos
Date: Thu, 15 Apr 2021 14:17:02 +0300
Subject: [PATCH 10/25] Adding AMQ stomp library for python
---
.../MorphemicConnection.py | 88 +++++++++++++++++++
amq-message-python-library/__init__.py | 2 +
amq-message-python-library/requirements.txt | 1 +
3 files changed, 91 insertions(+)
create mode 100644 amq-message-python-library/MorphemicConnection.py
create mode 100644 amq-message-python-library/__init__.py
create mode 100644 amq-message-python-library/requirements.txt
diff --git a/amq-message-python-library/MorphemicConnection.py b/amq-message-python-library/MorphemicConnection.py
new file mode 100644
index 00000000..d67a179f
--- /dev/null
+++ b/amq-message-python-library/MorphemicConnection.py
@@ -0,0 +1,88 @@
+
+import stomp
+import logging
+import json
+
+from stomp.listener import PrintingListener
+
class Connection:
    """Thin wrapper around a STOMP connection with deferred subscriptions.

    Subscriptions registered via subscribe()/topic()/queue() before connect()
    are replayed once the connection is established.
    """

    def __init__(self, username, password,
                 host='localhost',
                 port=61613,
                 debug=True):
        # BUGFIX: per-instance subscription registry. This was previously a
        # class attribute ([]), shared -- and therefore leaked -- across all
        # Connection instances.
        self.subscriptions = []
        self.hosts = [(host, port)]
        self.username = username
        self.password = password
        self.conn = stomp.Connection(host_and_ports=self.hosts)

        if debug:
            print("Enabling debug")
            self.conn.set_listener('print', PrintingListener())

    def set_listener(self, id, listener):
        # Forward to the underlying stomp connection, if any.
        if self.conn:
            self.conn.set_listener(id, listener)

    def subscribe(self, destination, id, ack='auto'):
        # Register the subscription once per id; it is applied on connect().
        ref = next((item for item in self.subscriptions if item['id'] == id), None)
        if not ref:
            self.subscriptions.append(
                {
                    'id': id,
                    'destination': destination,
                    'ack': ack,
                }
            )

    def topic(self, destination, id, ack='auto'):
        self.subscribe("/topic/%s" % destination, id, ack)

    def queue(self, destination, id, ack='auto'):
        self.subscribe("/queue/%s" % destination, id, ack)

    def is_topic(self, headers, topic):
        # BUGFIX: compare against the given topic instead of the hardcoded
        # '/topic/metrics_to_predict' (the parameter was previously ignored).
        return headers.get('destination') == '/topic/%s' % topic

    def unsubscribe(self, id):
        if not self.conn:
            return
        if not self.conn.running:
            return
        self.conn.unsubscribe(id)

    def connect(self, wait=True):
        if not self.conn:
            return
        self.conn.connect(self.username, self.password, wait=wait)
        # Replay every subscription registered before the connection existed.
        for s in self.subscriptions:
            self.conn.subscribe(s['destination'], s['id'], s['ack'])

    def disconnect(self):
        for s in self.subscriptions:
            self.unsubscribe(s['id'])
        self.conn.disconnect()

    def send_to_topic(self, destination, body, headers=None, **kwargs):
        # BUGFIX: avoid a shared mutable default argument (headers={}).
        if headers is None:
            headers = {}
        if not self.conn:
            logging.error("Connect first")
            return
        self.conn.send(destination="/topic/%s" % destination,
                       body=json.dumps(body),
                       content_type="application/json",
                       headers=headers, **kwargs)
diff --git a/amq-message-python-library/__init__.py b/amq-message-python-library/__init__.py
new file mode 100644
index 00000000..21f52880
--- /dev/null
+++ b/amq-message-python-library/__init__.py
@@ -0,0 +1,2 @@
+
+from . import MorphemicConnection as morphemic
\ No newline at end of file
diff --git a/amq-message-python-library/requirements.txt b/amq-message-python-library/requirements.txt
new file mode 100644
index 00000000..12393625
--- /dev/null
+++ b/amq-message-python-library/requirements.txt
@@ -0,0 +1 @@
+stomp.py
--
GitLab
From 726c46814208ca8573495bef67995387f146ae91 Mon Sep 17 00:00:00 2001
From: Andreas Tsagkaropoulos
Date: Thu, 15 Apr 2021 15:30:14 +0300
Subject: [PATCH 11/25] First commit of new AMQ message Java library
---
amq-message-java-library/client.bat | 24 ++
amq-message-java-library/client.sh | 21 ++
.../eu.melodic.event.brokerclient.properties | 18 +
amq-message-java-library/pom.xml | 83 +++++
.../event/brokerclient/BrokerClient.java | 345 ++++++++++++++++++
.../event/brokerclient/BrokerClientApp.java | 176 +++++++++
.../event/brokerclient/BrokerPublisher.java | 61 ++++
.../event/brokerclient/BrokerSubscriber.java | 46 +++
.../brokerclient/event/EventGenerator.java | 71 ++++
.../event/brokerclient/event/EventMap.java | 42 +++
.../properties/BrokerClientProperties.java | 97 +++++
11 files changed, 984 insertions(+)
create mode 100644 amq-message-java-library/client.bat
create mode 100644 amq-message-java-library/client.sh
create mode 100644 amq-message-java-library/eu.melodic.event.brokerclient.properties
create mode 100644 amq-message-java-library/pom.xml
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java
diff --git a/amq-message-java-library/client.bat b/amq-message-java-library/client.bat
new file mode 100644
index 00000000..32e3c121
--- /dev/null
+++ b/amq-message-java-library/client.bat
@@ -0,0 +1,24 @@
+@echo off
+::
+:: Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+::
+:: This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+:: Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+:: If a copy of the MPL was not distributed with this file, you can obtain one at
+:: https://www.mozilla.org/en-US/MPL/2.0/
+::
+
+if not exist target\dependency cmd /C "mvn dependency:copy-dependencies"
+
+set MELODIC_CONFIG_DIR=.
+
+setlocal
+set JAVA_OPTS= -Djavax.net.ssl.trustStore=..\config-files\broker-truststore.p12 ^
+ -Djavax.net.ssl.trustStorePassword=melodic ^
+ -Djavax.net.ssl.trustStoreType=pkcs12
+rem -Djavax.net.debug=all
+rem -Djavax.net.debug=ssl,handshake,record
+
+java %JAVA_OPTS% -classpath "target\classes;target\dependency\*" eu.melodic.event.brokerclient.BrokerClientApp %*
+
+endlocal
diff --git a/amq-message-java-library/client.sh b/amq-message-java-library/client.sh
new file mode 100644
index 00000000..64b3645f
--- /dev/null
+++ b/amq-message-java-library/client.sh
@@ -0,0 +1,21 @@
#!/bin/bash
#
# Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
#
# This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
# Esper library is used, in which case it is subject to the terms of General Public License v2.0.
# If a copy of the MPL was not distributed with this file, you can obtain one at
# https://www.mozilla.org/en-US/MPL/2.0/
#

# Fetch runtime dependencies once, on first use.
[ -d target/dependency ] || mvn dependency:copy-dependencies

MELODIC_CONFIG_DIR=.

# SSL trust-store settings for the broker connection (expanded unquoted below,
# so the options split on spaces exactly as in the escaped original).
JAVA_OPTS="-Djavax.net.ssl.trustStore=./broker-truststore.p12 -Djavax.net.ssl.trustStorePassword=melodic -Djavax.net.ssl.trustStoreType=pkcs12"
# -Djavax.net.debug=all
# -Djavax.net.debug=ssl,handshake,record

java $JAVA_OPTS -classpath "target/classes:target/dependency/*" eu.melodic.event.brokerclient.BrokerClientApp $*
diff --git a/amq-message-java-library/eu.melodic.event.brokerclient.properties b/amq-message-java-library/eu.melodic.event.brokerclient.properties
new file mode 100644
index 00000000..df01ca61
--- /dev/null
+++ b/amq-message-java-library/eu.melodic.event.brokerclient.properties
@@ -0,0 +1,18 @@
+#
+# Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+#
+# This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+# Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+# If a copy of the MPL was not distributed with this file, you can obtain one at
+# https://www.mozilla.org/en-US/MPL/2.0/
+#
+
+# Broker Client settings
+brokerclient.broker-url=tcp://localhost:61616
+brokerclient.broker-url-properties=transport.daemon=true&transport.trace=false&transport.useKeepAlive=true&transport.useInactivityMonitor=false&transport.needClientAuth=${brokerclient.ssl.client-auth.required}&transport.verifyHostName=false
+brokerclient.ssl.client-auth.required=false
+brokerclient.preserve-connection=false
+
+# Broker authentication
+brokerclient.broker-username=
+brokerclient.broker-password=
\ No newline at end of file
diff --git a/amq-message-java-library/pom.xml b/amq-message-java-library/pom.xml
new file mode 100644
index 00000000..1fe0c90e
--- /dev/null
+++ b/amq-message-java-library/pom.xml
@@ -0,0 +1,83 @@
+
+
+ 4.0.0
+
+
+ org.springframework.boot
+ spring-boot-starter-parent
+ 2.4.4
+
+
+ amq-message-java-library
+ AMQ message Java library
+ gr.ntua.imu.morphemic
+ 1.0.0
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-activemq
+
+
+ org.apache.activemq
+ activemq-broker
+
+
+
+
+ org.projectlombok
+ lombok
+ provided
+
+
+
+
+ org.apache.commons
+ commons-lang3
+ 3.8.1
+
+
+
+
+
+ eu.7bulls
+ Melodic 7bulls repository
+ https://nexus.7bulls.eu:8443/repository/maven-snapshots/
+
+
+ eu.7bulls
+ Melodic 7bulls repository
+ https://nexus.7bulls.eu:8443/repository/maven-releases/
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 3.2.4
+
+
+ package
+
+ shade
+
+
+
+
+
+
+
+
+
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java
new file mode 100644
index 00000000..373c9f63
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient;
+
+import eu.melodic.event.brokerclient.event.EventMap;
+import eu.melodic.event.brokerclient.properties.BrokerClientProperties;
+import java.io.Serializable;
+import java.util.*;
+import javax.jms.*;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.activemq.ActiveMQConnection;
+import org.apache.activemq.ActiveMQConnectionFactory;
+import org.apache.activemq.ActiveMQSslConnectionFactory;
+import org.apache.activemq.advisory.DestinationSource;
+import org.apache.activemq.command.ActiveMQQueue;
+import org.apache.activemq.command.ActiveMQTempQueue;
+import org.apache.activemq.command.ActiveMQTempTopic;
+import org.apache.activemq.command.ActiveMQTopic;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+public class BrokerClient {
+
+ @Autowired
+ private BrokerClientProperties properties;
+ private Connection connection;
+ private Session session;
+ private HashMap listeners = new HashMap<>();
+
+ public BrokerClient() {
+ }
+
+ public BrokerClient(BrokerClientProperties bcp) {
+ properties = bcp;
+ }
+
+ public BrokerClient(Properties p) {
+ properties = new BrokerClientProperties(p);
+ }
+
+ // ------------------------------------------------------------------------
+
+ public static BrokerClient newClient(String broker_properties_configuration_file_location) throws java.io.IOException, JMSException {
+ log.info("BrokerClient: Initializing...");
+
+ /*
+ // get properties file
+ String configDir = System.getenv("MELODIC_CONFIG_DIR");
+ if (configDir == null || configDir.trim().isEmpty()) configDir = ".";
+ log.info("BrokerClient: config-dir: {}", configDir);
+ String configPropFile = configDir + "/" + "eu.melodic.event.brokerclient.properties";
+ log.info("BrokerClient: config-file: {}", configPropFile);
+ */
+
+ // load properties
+ Properties p = new Properties();
+ //ClassLoader loader = Thread.currentThread().getContextClassLoader();
+ //try (java.io.InputStream in = loader.getClass().getResourceAsStream(configPropFile)) { p.load(in); }
+ try (java.io.InputStream in = new java.io.FileInputStream(broker_properties_configuration_file_location)) {
+ p.load(in);
+ }
+ log.info("BrokerClient: config-properties: {}", p);
+
+ // initialize broker client
+ BrokerClient client = new BrokerClient(p);
+ log.info("BrokerClient: Configuration:\n{}", client.properties);
+
+ return client;
+ }
+
+ public static BrokerClient newClient(String username, String password, String broker_properties_configuration_file_path) throws java.io.IOException, JMSException {
+ BrokerClient client = newClient(broker_properties_configuration_file_path);
+ if (username!=null && password!=null) {
+ client.getClientProperties().setBrokerUsername(username);
+ client.getClientProperties().setBrokerPassword(password);
+ }
+ return client;
+ }
+
+ public static BrokerClient newClient() {
+ return new BrokerClient();
+ }
+
+ // ------------------------------------------------------------------------
+
+ public BrokerClientProperties getClientProperties() {
+ checkProperties();
+ return properties;
+ }
+
+ protected void checkProperties() {
+ if (properties==null) {
+ //use defaults
+ properties = new BrokerClientProperties();
+ }
+ }
+
+ // ------------------------------------------------------------------------
+
+ public synchronized Set getDestinationNames(String connectionString) throws JMSException {
+ // open or reuse connection
+ checkProperties();
+ boolean _closeConn = false;
+ if (session==null) {
+ openConnection(connectionString);
+ _closeConn = ! properties.isPreserveConnection();
+ }
+
+ // Get destinations from Broker
+ log.info("BrokerClient.getDestinationNames(): Getting destinations: connection={}, username={}", connectionString, properties.getBrokerUsername());
+ ActiveMQConnection conn = (ActiveMQConnection)connection;
+ DestinationSource ds = conn.getDestinationSource();
+ Set queues = ds.getQueues();
+ Set topics = ds.getTopics();
+ Set tempQueues = ds.getTemporaryQueues();
+ Set tempTopics = ds.getTemporaryTopics();
+ log.info("BrokerClient.getDestinationNames(): Getting destinations: done");
+
+ // Get destination names
+ HashSet destinationNames = new HashSet<>();
+ for (ActiveMQQueue q : queues) destinationNames.add("QUEUE "+q.getQueueName());
+ for (ActiveMQTopic t : topics) destinationNames.add("TOPIC "+t.getTopicName());
+ for (ActiveMQTempQueue tq : tempQueues) destinationNames.add("Temp QUEUE "+tq.getQueueName());
+ for (ActiveMQTempTopic tt : tempTopics) destinationNames.add("Temp TOPIC "+tt.getTopicName());
+
+ // close connection
+ if (_closeConn) {
+ closeConnection();
+ }
+
+ return destinationNames;
+ }
+
+ // ------------------------------------------------------------------------
+
+ public synchronized void publishEvent(String connectionString, String destinationName, Map eventMap) throws JMSException {
+ _publishEvent(connectionString, destinationName, new EventMap(eventMap));
+ }
+
+ protected synchronized void _publishEvent(String connectionString, String destinationName, Serializable event) throws JMSException {
+ _publishEvent(connectionString,destinationName,event,false);
+ }
+ protected synchronized void _publishEvent(String connectionString, String destinationName, Serializable event, boolean persistent_connection_demanded) throws JMSException {
+ // open or reuse connection
+ checkProperties();
+ boolean _closeConn = false;
+ if (session==null) {
+ openConnection(connectionString);
+ _closeConn = ! properties.isPreserveConnection();
+ }
+ if (persistent_connection_demanded){
+ _closeConn = false;
+ }
+
+ // Create the destination (Topic or Queue)
+ //Destination destination = session.createQueue( destinationName );
+ Destination destination = session.createTopic(destinationName);
+
+ // Create a MessageProducer from the Session to the Topic or Queue
+ MessageProducer producer = session.createProducer(destination);
+ producer.setDeliveryMode(javax.jms.DeliveryMode.NON_PERSISTENT);
+
+ // Create a messages
+ //ObjectMessage message = session.createObjectMessage(event);
+ TextMessage message = session.createTextMessage(event.toString());
+
+ // Tell the producer to send the message
+ long hash = message.hashCode();
+ log.info("BrokerClient.publishEvent(): Sending message: connection={}, username={}, destination={}, hash={}, payload={}", connectionString, properties.getBrokerUsername(), destinationName, hash, event);
+ producer.send(message);
+ log.info("BrokerClient.publishEvent(): Message sent: connection={}, username={}, destination={}, hash={}, payload={}", connectionString, properties.getBrokerUsername(), destinationName, hash, event);
+
+ // close connection
+ if (_closeConn) {
+ closeConnection();
+ }
+ }
+
+ // ------------------------------------------------------------------------
+
+ public void subscribe(String connectionString, String destinationName, MessageListener listener) throws JMSException {
+ // Create or open connection
+ checkProperties();
+ if (session==null) {
+ openConnection(connectionString);
+ }
+
+ // Create the destination (Topic or Queue)
+ log.info("BrokerClient: Subscribing to destination: {}...", destinationName);
+ //Destination destination = session.createQueue( destinationName );
+ Destination destination = session.createTopic(destinationName);
+
+ // Create a MessageConsumer from the Session to the Topic or Queue
+ MessageConsumer consumer = session.createConsumer(destination);
+ consumer.setMessageListener(listener);
+ listeners.put(listener, consumer);
+ }
+
+ public void unsubscribe(MessageListener listener) throws JMSException {
+ MessageConsumer consumer = listeners.get(listener);
+ if (consumer!=null) {
+ consumer.close();
+ }
+ }
+
+ // ------------------------------------------------------------------------
+
+ public void receiveEvents(String connectionString, String destinationName, MessageListener listener) throws JMSException {
+ checkProperties();
+ MessageConsumer consumer = null;
+ boolean _closeConn = false;
+ try {
+ // Create or open connection
+ if (session==null) {
+ openConnection(connectionString);
+ _closeConn = ! properties.isPreserveConnection();
+ }
+
+ // Create the destination (Topic or Queue)
+ log.info("BrokerClient: Subscribing to destination: {}...", destinationName);
+ //Destination destination = session.createQueue( destinationName );
+ Destination destination = session.createTopic(destinationName);
+
+ // Create a MessageConsumer from the Session to the Topic or Queue
+ consumer = session.createConsumer(destination);
+
+ // Wait for messages
+ log.info("BrokerClient: Waiting for messages...");
+ while (true) {
+ Message message = consumer.receive();
+ listener.onMessage(message);
+ }
+
+ } finally {
+ // Clean up
+ log.info("BrokerClient: Closing connection...");
+ if (consumer != null) consumer.close();
+ if (_closeConn) {
+ closeConnection();
+ }
+ }
+ }
+
+ // ------------------------------------------------------------------------
+
+ public ActiveMQConnectionFactory createConnectionFactory() {
+ // Create connection factory based on Broker URL scheme
+ checkProperties();
+ final ActiveMQConnectionFactory connectionFactory;
+ String brokerUrl = properties.getBrokerUrl();
+ if (brokerUrl.startsWith("ssl")) {
+ log.info("BrokerClient.createConnectionFactory(): Creating new SSL connection factory instance: url={}", brokerUrl);
+ final ActiveMQSslConnectionFactory sslConnectionFactory = new ActiveMQSslConnectionFactory(brokerUrl);
+ try {
+ sslConnectionFactory.setTrustStore(properties.getTruststoreFile());
+ sslConnectionFactory.setTrustStoreType(properties.getTruststoreType());
+ sslConnectionFactory.setTrustStorePassword(properties.getTruststorePassword());
+ sslConnectionFactory.setKeyStore(properties.getKeystoreFile());
+ sslConnectionFactory.setKeyStoreType(properties.getKeystoreType());
+ sslConnectionFactory.setKeyStorePassword(properties.getKeystorePassword());
+ //sslConnectionFactory.setKeyStoreKeyPassword( properties........ );
+
+ connectionFactory = sslConnectionFactory;
+ } catch (final Exception theException) {
+ throw new Error(theException);
+ }
+ } else {
+ log.info("BrokerClient.createConnectionFactory(): Creating new non-SSL connection factory instance: url={}", brokerUrl);
+ connectionFactory = new ActiveMQConnectionFactory(brokerUrl);
+ }
+
+ // Other connection factory settings
+ //connectionFactory.setSendTimeout(....5000L);
+ //connectionFactory.setTrustedPackages(Arrays.asList("eu.melodic.event"));
+ connectionFactory.setTrustAllPackages(true);
+ connectionFactory.setWatchTopicAdvisories(true);
+
+ return connectionFactory;
+ }
+
+ // ------------------------------------------------------------------------
+
+ public synchronized void openConnection() throws JMSException {
+ checkProperties();
+ openConnection(properties.getBrokerUrl(), null, null);
+ }
+
+ public synchronized void openConnection(String connectionString) throws JMSException {
+ openConnection(connectionString, null, null);
+ }
+
+ public synchronized void openConnection(String connectionString, String username, String password) throws JMSException {
+ openConnection(connectionString, username, password, properties.isPreserveConnection());
+ }
+
+ public synchronized void openConnection(String connectionString, String username, String password, boolean preserveConnection) throws JMSException {
+ checkProperties();
+ if (connectionString == null) connectionString = properties.getBrokerUrl();
+ log.debug("BrokerClient: Credentials provided as arguments: username={}, password={}", username, password);
+ if (StringUtils.isBlank(username)) {
+ username = properties.getBrokerUsername();
+ password = properties.getBrokerPassword();
+ log.debug("BrokerClient: Credentials read from properties: username={}, password={}", username, password);
+ }
+
+ // Create connection factory
+ ActiveMQConnectionFactory connectionFactory = createConnectionFactory();
+ connectionFactory.setBrokerURL(connectionString);
+ if (StringUtils.isNotBlank(username) && password != null) {
+ connectionFactory.setUserName(username);
+ connectionFactory.setPassword(password);
+ }
+ log.debug("BrokerClient: Connection credentials: username={}, password={}", username, password);
+
+ // Create a Connection
+ log.info("BrokerClient: Connecting to broker: {}...", connectionString);
+ Connection connection = connectionFactory.createConnection();
+ connection.start();
+
+ // Create a Session
+ log.info("BrokerClient: Opening session...");
+ Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+
+ this.connection = connection;
+ this.session = session;
+ }
+
+ public synchronized void closeConnection() throws JMSException {
+ // Clean up
+ session.close();
+ connection.close();
+ session = null;
+ connection = null;
+ }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java
new file mode 100644
index 00000000..0448fe2c
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient;
+
+import eu.melodic.event.brokerclient.event.EventGenerator;
+import eu.melodic.event.brokerclient.event.EventMap;
+import javax.jms.*;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
@Slf4j
public class BrokerClientApp {

    /**
     * Command-line entry point for the broker client.
     *
     * Argument layout: the first argument is the command (list / publish /
     * custom_publish / ...), optionally followed by -U&lt;username&gt;, -P&lt;password&gt;
     * and -C&lt;config-file&gt; flags, then command-specific positional arguments.
     * Missing password / config-file values are prompted for on the console
     * (NOTE(review): System.console() is null when stdin is redirected — would NPE).
     */
    public static void main(String args[]) throws java.io.IOException, JMSException {
        if (args.length==0) {
            usage();
            return;
        }

        // 'aa' walks the argument array as flags and positionals are consumed.
        int aa=0;
        String command = args[aa++];

        // Optional credential flags: -U<user> must precede -P<pass>.
        String username = args.length>aa && args[aa].startsWith("-U") ? args[aa++].substring(2) : null;
        String password = username!=null && args.length>aa && args[aa].startsWith("-P") ? args[aa++].substring(2) : null;
        if (StringUtils.isNotBlank(username) && password == null) {
            // Username given without password: prompt interactively (not echoed).
            password = new String(System.console().readPassword("Enter broker password: "));
        }
        String broker_properties_configuration_file_location = args.length>aa && args[aa].startsWith("-C") ? args[aa++].substring(2) : null;
        if (broker_properties_configuration_file_location == null){
            // No -C flag: read the configuration file location from the console.
            broker_properties_configuration_file_location = new String(System.console().readLine());
        }

        // list destinations
        if ("list".equalsIgnoreCase(command)) {
            String url = args[aa++];
            log.info("BrokerClientApp: Listing destinations:");
            BrokerClient client = BrokerClient.newClient(username, password,broker_properties_configuration_file_location);
            client.getDestinationNames(url).stream().forEach(d -> log.info(" {}", d));
        } else
        // send an event
        if ("publish".equalsIgnoreCase(command)) {
            String url = args[aa++];
            String topic = args[aa++];
            String value = args[aa++];
            String level = args[aa++];
            // Event carries metricValue, level and the current timestamp.
            EventMap event = new EventMap(Double.parseDouble(value), Integer.parseInt(level), System.currentTimeMillis());
            log.info("BrokerClientApp: Publishing event: {}", event);
            BrokerClient client = BrokerClient.newClient(username, password,broker_properties_configuration_file_location);
            client.publishEvent(url, topic, event);
        } else
        //publish an event with custom information
        if ("custom_publish".equalsIgnoreCase(command)) {
            Map data = new HashMap<>();
            String url = args[aa++];
            String topic = args[aa++];
            // NOTE(review): the following region is truncated/garbled in this patch — text between
            // '<' and '>' was stripped, fusing the custom_publish loop with the tail of usage().
            // Restore the missing commands and usage strings from the original commit.
            while (aa+1 [-P]] ");
        log.info("BrokerClientApp: client publish [-U [-P]] ");
        log.info("BrokerClientApp: client receive [-U [-P]] ");
        log.info("BrokerClientApp: client subscribe [-U [-P]] ");
        log.info("BrokerClientApp: client generator [-U [-P]] ");
    }
}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java
new file mode 100644
index 00000000..3bc69774
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java
@@ -0,0 +1,61 @@
+package eu.melodic.event.brokerclient;
+
+import lombok.extern.slf4j.Slf4j;
+
+import javax.jms.JMSException;
+import java.io.IOException;
+import java.util.Map;
+
+@Slf4j
+public class BrokerPublisher {
+
+ private String topic;
+ private String url;
+ private String username;
+ private String password;
+ private String broker_configuration_file_location;
+ private BrokerClient client = null;
+
+ public BrokerPublisher(String topic, String url, String username, String password,String broker_configuration_file_location){
+ this.topic = topic;
+ this.url = url;
+ this.username = username;
+ this.password = password;
+ this.broker_configuration_file_location = broker_configuration_file_location;
+ }
+
+ public void publish(Map event_map) {
+ try {
+ log.info("BrokerClientApp: Publishing to topic: {}", topic);
+ if (client==null) {
+ client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+ }
+ client.publishEvent(url, topic, event_map);
+ }catch (IOException | JMSException i){
+ i.printStackTrace();
+ }
+ }
+
+ public void publish(String s) {
+ try {
+ log.info("BrokerClientApp: Publishing to topic: {}", topic);
+ if(client == null) {
+ client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+ }
+ client._publishEvent(url, topic, s);
+ }catch (IOException | JMSException i){
+ i.printStackTrace();
+ }
+ }
+ public void publish(String s,boolean persistent_connection_demanded) {
+ try {
+ log.info("BrokerClientApp: Publishing to topic: {}", topic);
+ if (client==null) {
+ client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+ }
+ client._publishEvent(url, topic, s,persistent_connection_demanded);
+ }catch (IOException | JMSException i){
+ i.printStackTrace();
+ }
+ }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java
new file mode 100644
index 00000000..19351cb7
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java
@@ -0,0 +1,46 @@
+package eu.melodic.event.brokerclient;
+
+import lombok.extern.slf4j.Slf4j;
+
+import javax.jms.*;
+import java.io.IOException;
+import java.util.function.BiFunction;
+
+
+@Slf4j
+public class BrokerSubscriber {
+
+ private String topic;
+ private String url;
+ private String username;
+ private String password;
+ private String broker_configuration_file_location;
+ private BrokerClient client = null;
+
+ public BrokerSubscriber(String topic, String url, String username, String password, String broker_configuration_file_location){
+ this.topic = topic;
+ this.url = url;
+ this.username = username;
+ this.password = password;
+ this.broker_configuration_file_location = broker_configuration_file_location;
+ }
+
+
+ public void subscribe(BiFunction function) {
+ try {
+ log.info("BrokerClientApp: Subscribing to topic: {}", topic);
+ BrokerClient client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+ client.receiveEvents(url, topic, message -> {
+ try {
+ function.apply(topic,((TextMessage) message).getText());
+ } catch (JMSException j) {
+ log.info("Shutting down subscriber...");
+ j.printStackTrace();
+ }
+ });
+ }catch (IOException | JMSException i){
+ i.printStackTrace();
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java
new file mode 100644
index 00000000..4aab576d
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.event;
+
+import eu.melodic.event.brokerclient.BrokerClient;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+
+@Data
+@Slf4j
+public class EventGenerator implements Runnable {
+ private BrokerClient client;
+ private String brokerUrl;
+ private String destinationName;
+ private long interval;
+ private long howmany = -1;
+ private double lowerValue;
+ private double upperValue;
+ private int level;
+
+ private transient boolean keepRunning;
+
+ public void start() {
+ if (keepRunning) return;
+ Thread runner = new Thread(this);
+ runner.setDaemon(true);
+ runner.start();
+ }
+
+ public void stop() {
+ keepRunning = false;
+ }
+
+ public void run() {
+ log.info("EventGenerator.run(): Start sending events: event-generator: {}", this);
+
+ keepRunning = true;
+ double valueRangeWidth = upperValue - lowerValue;
+ long countSent = 0;
+ while (keepRunning) {
+ try {
+ double newValue = Math.random() * valueRangeWidth + lowerValue;
+ EventMap event = new EventMap(newValue, level, System.currentTimeMillis());
+ log.info("EventGenerator.run(): Sending event #{}: {}", countSent + 1, event);
+ client.publishEvent(brokerUrl, destinationName, event);
+ countSent++;
+ if (countSent == howmany) keepRunning = false;
+ log.info("EventGenerator.run(): Event sent #{}: {}", countSent, event);
+ } catch (Exception ex) {
+ log.warn("EventGenerator.run(): WHILE-EXCEPTION: {}", ex);
+ }
+ // sleep for 'interval' ms
+ try {
+ if (keepRunning) {
+ Thread.sleep(interval);
+ }
+ } catch (InterruptedException ex) {
+ log.warn("EventGenerator.run(): Sleep interrupted");
+ }
+ }
+
+ log.info("EventGenerator.run(): Stop sending events: event-generator: {}", this);
+ }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java
new file mode 100644
index 00000000..a180508e
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.event;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+
/**
 * Event payload: a String-keyed map carrying "metricValue" (Double),
 * "level" (Integer) and "timestamp" (Long).
 *
 * Note: the lombok @Getter (no-op — the class has no fields) and unused @Slf4j
 * annotations were removed; generics were added to the HashMap supertype.
 */
public class EventMap extends HashMap<String, Object> implements Serializable {
    /** Creates an empty event map. */
    public EventMap() {
        super();
    }

    /**
     * Copies all entries of {@code map}. The parameter stays raw for
     * backward compatibility with existing raw-typed callers.
     */
    @SuppressWarnings("unchecked")
    public EventMap(Map map) {
        super(map);
    }

    /** Creates an event with the three standard properties filled in. */
    public EventMap(double metricValue, int level, long timestamp) {
        put("metricValue", metricValue);
        put("level", level);
        put("timestamp", timestamp);
    }

    /** @return the standard property names, in declaration order */
    public static String[] getPropertyNames() {
        return new String[]{"metricValue", "level", "timestamp"};
    }

    /** @return the value classes matching {@link #getPropertyNames()} */
    public static Class<?>[] getPropertyClasses() {
        return new Class<?>[]{Double.class, Integer.class, Long.class};
    }
}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java
new file mode 100644
index 00000000..25974e81
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.properties;
+
+import lombok.Data;
+import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+
+@Data
+@ToString(exclude = {"truststorePassword", "keystorePassword", "brokerPassword"})
+@Configuration
+@ConfigurationProperties(prefix = "brokerclient")
+@PropertySource("file:${MELODIC_CONFIG_DIR}/eu.melodic.event.brokerclient.properties")
+@Slf4j
+public class BrokerClientProperties {
+ @Value("${broker-name:broker}")
+ private String brokerName;
+ @Value("${broker-url:ssl://localhost:61616}")
+ private String brokerUrl;
+ @Value("${broker-url-properties:}")
+ private String brokerUrlProperties;
+ @Value("${ssl.client-auth.required:false}")
+ private boolean clientAuthRequired;
+ @Value("${connector-port:-1}")
+ private int connectorPort;
+ @Value("${preserve-connection:false}")
+ private boolean preserveConnection;
+
+ @Value("${ssl.truststore.file:}")
+ private String truststoreFile;
+ @Value("${ssl.truststore.type:}")
+ private String truststoreType;
+ @Value("${ssl.truststore.password:}")
+ private String truststorePassword;
+ @Value("${ssl.keystore.file:}")
+ private String keystoreFile;
+ @Value("${ssl.keystore.type:}")
+ private String keystoreType;
+ @Value("${ssl.keystore.password:}")
+ private String keystorePassword;
+
+ @Value("${broker-username:}")
+ private String brokerUsername;
+ @Value("${broker-password:}")
+ private String brokerPassword;
+
+ public BrokerClientProperties() {
+ brokerName = "broker";
+ brokerUrl = "ssl://localhost:61616}";
+ brokerUrlProperties = "";
+ connectorPort = -1;
+ preserveConnection = true;
+
+ truststoreFile = "";
+ truststoreType = "";
+ truststorePassword = "";
+ keystoreFile = "";
+ keystoreType = "";
+ keystorePassword = "";
+ clientAuthRequired = false;
+
+ brokerUsername = "";
+ brokerPassword = "";
+ }
+
+ public BrokerClientProperties(java.util.Properties p) {
+ brokerName = p.getProperty("brokerclient.broker-name", "broker");
+ brokerUrl = p.getProperty("brokerclient.broker-url", "ssl://localhost:61616}");
+ brokerUrlProperties = p.getProperty("brokerclient.broker-url-properties", "");
+ connectorPort = Integer.parseInt(p.getProperty("brokerclient.connector-port", "-1"));
+ preserveConnection = Boolean.parseBoolean(p.getProperty("brokerclient.preserve-connection", "true"));
+
+ truststoreFile = p.getProperty("brokerclient.ssl.truststore.file", "");
+ truststoreType = p.getProperty("brokerclient.ssl.truststore.type", "");
+ truststorePassword = p.getProperty("brokerclient.ssl.truststore.password", "");
+ keystoreFile = p.getProperty("brokerclient.ssl.keystore.file", "");
+ keystoreType = p.getProperty("brokerclient.ssl.keystore.type", "");
+ keystorePassword = p.getProperty("brokerclient.ssl.keystore.password", "");
+ clientAuthRequired = Boolean.parseBoolean(p.getProperty("brokerclient.ssl.client-auth.required", "false"));
+
+ brokerUsername = p.getProperty("brokerclient.broker-username", "");
+ brokerPassword = p.getProperty("brokerclient.broker-password", "");
+
+ brokerUrlProperties = brokerUrlProperties.replace("${brokerclient.ssl.client-auth.required}", Boolean.toString(clientAuthRequired));
+ }
+}
--
GitLab
From 282f4bd0c3d1c5e6355d033ee2975b48c9dbe042 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Thu, 15 Apr 2021 18:29:22 +0200
Subject: [PATCH 12/25] Some code cleaning and refactoring
---
.../org/activeeon/morphemic/PAGateway.java | 114 ++++++------------
1 file changed, 38 insertions(+), 76 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 3aa2d25e..ef3b7ae1 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -39,15 +39,15 @@ public class PAGateway {
private final String paURL;
- public PAResourceManagerGateway resourceManagerGateway;
+ public final PAResourceManagerGateway resourceManagerGateway;
- public PASchedulerGateway schedulerGateway;
+ public final PASchedulerGateway schedulerGateway;
- public PAConnectorIaasGateway connectorIaasGateway;
+ public final PAConnectorIaasGateway connectorIaasGateway;
- final String NEW_LINE = System.getProperty("line.separator");
+ private final String NEW_LINE = System.getProperty("line.separator");
- final String SCRIPTS_SEPARATION = NEW_LINE + NEW_LINE + "# Main script" + NEW_LINE;
+ private final String SCRIPTS_SEPARATION = NEW_LINE + NEW_LINE + "# Main script" + NEW_LINE;
private static final Logger LOGGER = Logger.getLogger(PAGateway.class);
@@ -768,6 +768,7 @@ public class PAGateway {
LOGGER.error(String.format("Job [%s] not found", jobId));
return 1;
}
+
// Let's find the task:
Optional optTask = Optional.ofNullable(optJob.get().findTask(taskName));
if (!optTask.isPresent()) {
@@ -838,13 +839,7 @@ public class PAGateway {
List scriptTasks = buildScalingOutPATask(task, job, scaledTaskName);
if (scriptTasks != null && !scriptTasks.isEmpty()) {
- scriptTasks.forEach(scriptTask -> {
- try {
- paJob.addTask(scriptTask);
- } catch (UserException e) {
- LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
- }
- });
+ addAllScriptTasksToPAJob(paJob, task, scriptTasks);
EntityManagerHelper.persist(task);
}
});
@@ -861,6 +856,16 @@ public class PAGateway {
LOGGER.info("Scaling out of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
}
+ private void addAllScriptTasksToPAJob(TaskFlowJob paJob, Task task, List scriptTasks) {
+ scriptTasks.forEach(scriptTask -> {
+ try {
+ paJob.addTask(scriptTask);
+ } catch (UserException e) {
+ LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
+ }
+ });
+ }
+
private void setAllScalingOutMandatoryDependencies(TaskFlowJob paJob, Job jobToSubmit, String scaledTaskName, List newNodesNumbers) {
jobToSubmit.getTasks().forEach(task -> {
if (task.getParentTasks() != null && !task.getParentTasks().isEmpty()) {
@@ -909,16 +914,10 @@ public class PAGateway {
// When the scaled task is a child the task to be built
LOGGER.info("Building task " + task.getName() + " as a parent of task " + scaledTaskName);
scriptTasks.addAll(createParentScaledTask(task, job));
- } else if (scaledTaskName.equals(task.getName())) {
- // When the scaled task is the task to be built
- LOGGER.info("Building task " + task.getName() + " as it is scaled out");
- scriptTasks.addAll(buildScaledPATask(task, job));
- } else if (task.getParentTasks().contains(scaledTaskName)) {
- // When the scaled task is a parent of the task to be built
- LOGGER.info("Building task " + task.getName() + " as a child of task " + scaledTaskName);
- scriptTasks.addAll(createChildScaledTask(task, job));
} else {
- LOGGER.debug("Task " + task.getName() + " is not impacted by the scaling of task " + scaledTaskName);
+ // Using buildScalingInPATask because it handles all the remaining cases
+ LOGGER.info("Moving to building with buildScalingInPATask() method");
+ scriptTasks.addAll(buildScalingInPATask(task, job, scaledTaskName));
}
return scriptTasks;
@@ -990,17 +989,7 @@ public class PAGateway {
deployment.setNodeAccessToken(token);
// Creating application deployment tasks
- List appTasks = createAppTasks(task, suffix, token, job);
- task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
-
- // Creating infra preparation task
- appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
- appTasks.get(1).addDependence(appTasks.get(0));
-
- // Add dependency between infra and application deployment tasks
- appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
-
- scriptTasks.addAll(appTasks);
+ createAndAddAppDeploymentTasks(task, suffix, token, scriptTasks, job);
});
scriptTasks.forEach(scriptTask -> task.addSubmittedTaskName(scriptTask.getName()));
@@ -1008,6 +997,20 @@ public class PAGateway {
return scriptTasks;
}
+ private void createAndAddAppDeploymentTasks(Task task, String suffix, String token, List scriptTasks, Job job) {
+ List appTasks = createAppTasks(task, suffix, token, job);
+ task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
+
+ // Creating infra preparation task
+ appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
+ appTasks.get(1).addDependence(appTasks.get(0));
+
+ // Add dependency between infra and application deployment tasks
+ appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
+
+ scriptTasks.addAll(appTasks);
+ }
+
private List createParentScaledTask(Task task, Job job) {
List scriptTasks = new LinkedList<>();
task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
@@ -1119,18 +1122,12 @@ public class PAGateway {
List scriptTasks = buildScalingInPATask(task, job, scaledTaskName);
if (scriptTasks != null && !scriptTasks.isEmpty()) {
- scriptTasks.forEach(scriptTask -> {
- try {
- paJob.addTask(scriptTask);
- } catch (UserException e) {
- LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
- }
- });
+ addAllScriptTasksToPAJob(paJob, task, scriptTasks);
EntityManagerHelper.persist(task);
}
});
- setAllScalingInMandatoryDependencies(paJob, job, scaledTaskName);
+ setAllMandatoryDependencies(paJob, job);
paJob.setProjectName("Morphemic");
@@ -1142,29 +1139,8 @@ public class PAGateway {
LOGGER.info("Scaling out of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
}
- private void setAllScalingInMandatoryDependencies(TaskFlowJob paJob, Job jobToSubmit, String scaledTaskName) {
- Task scaledTask = jobToSubmit.findTask(scaledTaskName);
- jobToSubmit.getTasks().forEach(task -> {
- if (task.getParentTasks() != null && !task.getParentTasks().isEmpty()) {
- task.getParentTasks().forEach(parentTaskName -> {
- paJob.getTasks().forEach(paTask -> {
- paJob.getTasks().forEach(paParentTask -> {
- if (paTask.getName().contains(task.getName()) && paParentTask.getName().contains(parentTaskName)) {
- if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
- paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
- paTask.addDependence(paParentTask);
- }
- }
- });
- });
- });
- }
- });
- }
-
private List buildScalingInPATask(Task task, Job job, String scaledTaskName) {
List scriptTasks = new LinkedList<>();
- Task scaledTask = job.findTask(scaledTaskName);
if (scaledTaskName.equals(task.getName())) {
// When the scaled task is the task to be built
@@ -1212,17 +1188,7 @@ public class PAGateway {
// Creating application deployment tasks
- List appTasks = createAppTasks(task, suffix, token, job);
- task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
-
- // Creating infra preparation task
- appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
- appTasks.get(1).addDependence(appTasks.get(0));
-
- // Add dependency between infra and application deployment tasks
- appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
-
- scriptTasks.addAll(appTasks);
+ createAndAddAppDeploymentTasks(task, suffix, token, scriptTasks, job);
});
task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
}
@@ -1416,10 +1382,6 @@ public class PAGateway {
String.valueOf(submittedJob.getSubmittedJobId()),
submittedTaskName));
});
-
- TaskResult taskResult = schedulerGateway.getTaskResult(String.valueOf(submittedJob.getSubmittedJobId()),
- createdTask.getSubmittedTaskNames()
- .get(createdTask.getSubmittedTaskNames().size() - 1));
LOGGER.info("Results of task: " + taskName + " fetched successfully: " + taskResultsMap.toString());
return taskResultsMap;
}
--
GitLab
From 221da3fa238981be53e0281fc0a4de5b8f4dc9f1 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Fri, 16 Apr 2021 17:37:40 +0200
Subject: [PATCH 13/25] Fix ports to be open jackson mapping issue
---
.../src/main/java/org/activeeon/morphemic/PAGateway.java | 4 ++--
.../src/main/java/org/activeeon/morphemic/model/Job.java | 4 ++--
.../src/main/java/org/activeeon/morphemic/model/Port.java | 1 +
.../org/activeeon/morphemic/service/NodeCandidateUtils.java | 2 +-
4 files changed, 6 insertions(+), 5 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index ef3b7ae1..91f8ef1f 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -1,5 +1,6 @@
package org.activeeon.morphemic;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.activeeon.morphemic.application.deployment.PAFactory;
import org.activeeon.morphemic.application.deployment.PASchedulerGateway;
import org.activeeon.morphemic.infrastructure.deployment.PAConnectorIaasGateway;
@@ -8,7 +9,6 @@ import org.activeeon.morphemic.model.*;
import org.activeeon.morphemic.service.*;
import org.apache.commons.lang3.Validate;
import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONObject;
import org.ow2.proactive.resourcemanager.exception.RMException;
@@ -728,7 +728,7 @@ public class PAGateway {
nodeConfigJson += "\"}";
} else {
try {
- nodeConfigJson += "\", \"portToOpens\": " + mapper.writeValueAsString(task.getPortsToOpen()) + "}";
+ nodeConfigJson += "\", \"portsToOpen\": " + mapper.writeValueAsString(task.getPortsToOpen()) + "}";
} catch (IOException e) {
LOGGER.error(e.getStackTrace());
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
index 7970f508..172ccae4 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
@@ -1,8 +1,8 @@
package org.activeeon.morphemic.model;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
import lombok.*;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
import javax.persistence.*;
import java.io.IOException;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
index 17a3af06..5def6db2 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
@@ -23,6 +23,7 @@ public class Port implements Serializable {
@Column(name = "VALUE")
private Integer value;
+ @JsonIgnore
@Column(name = "REQUESTED_NAME")
private String requestedName;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
index b0a47cac..3f943daa 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
@@ -1,10 +1,10 @@
package org.activeeon.morphemic.service;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.activeeon.morphemic.infrastructure.deployment.PAConnectorIaasGateway;
import org.activeeon.morphemic.model.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONObject;
--
GitLab
From b3bb9e3c5a96ac62fe5d5d0a0af116fc3aa58fbd Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Mon, 19 Apr 2021 17:30:15 +0300
Subject: [PATCH 14/25] upgrade stomp to version 7
---
.../database/data/meta/meta.db | Bin 148 -> 178 bytes
.../database/entrypoint.sh | 2 +-
.../inputapi/src/activemqlistermanager.py | 16 +-
.../database/inputapi/src/app.py | 1 -
.../dataset-builder/datasets/demo.csv | 284 ++----------------
.../docker-compose.yaml | 2 +
.../example/influxdb_querier.py | 2 +-
.../example/publisher.py | 3 +-
.../example/subscriber.py | 10 +-
9 files changed, 45 insertions(+), 275 deletions(-)
diff --git a/morphemic-persistent-storage/database/data/meta/meta.db b/morphemic-persistent-storage/database/data/meta/meta.db
index cfa772e05ee150b92492c912a03600ba77b07d97..1ffc2dba105f54095bffb34565de252b010024af 100644
GIT binary patch
delta 111
zcmbQjxQS7KgF%2*;$`Ea)t@#W=oi=WnkXP@YQn{ySXz>wo|-4XAkomUq{bug~uhQ((|y;$WI+V=ar2`m^Kf@0+_JQj8W14on_Q
E02x~<9{>OV
delta 81
zcmdnQIE7JwgF%2r;(N=N4R@~m+8d#zGf_ZPT#So7v9u&VJvC2&L875y$;tl*I~f%i
lCq`Mz9{F=|?T;N|K+%Tw*$q8A#S|E{m^c_M7#tWq7y
Date: Mon, 19 Apr 2021 14:39:28 +0000
Subject: [PATCH 15/25] Improve persistence configuration initializing
mechanism
---
.../service/EntityManagerHelper.java | 28 +++++++++-
.../morphemic/service/PathHelper.java | 17 ++++++
.../service/PersistenceConfiguration.java | 55 +++++++++++++++++++
3 files changed, 98 insertions(+), 2 deletions(-)
create mode 100644 scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
create mode 100644 scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
index 5bad8175..7d11f428 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
@@ -1,9 +1,13 @@
package org.activeeon.morphemic.service;
+import org.apache.commons.configuration2.ex.ConfigurationException;
+import org.apache.log4j.Logger;
import org.hibernate.CacheMode;
import org.hibernate.jpa.QueryHints;
import javax.persistence.*;
+import java.util.HashMap;
+import java.util.Map;
public class EntityManagerHelper {
@@ -11,8 +15,28 @@ public class EntityManagerHelper {
private static final ThreadLocal threadLocal;
+ private static final Logger LOGGER = Logger.getLogger(EntityManagerHelper.class);
+
static {
- emf = Persistence.createEntityManagerFactory("model");
+ Map persistenceConfiguration = new HashMap<>();
+ try {
+ // Load the persistence configurations
+ persistenceConfiguration = PersistenceConfiguration.getAllPersistenceConfigurationPropertiesAsMap();
+ LOGGER.info("Initializing the persistence with custom configurations...");
+
+ } catch (ConfigurationException | NullPointerException e) {
+ /*
+ * In case the properties file was not found, this catch statement is triggered
+ * The EMF will be initialized with default configurations
+ *
+ * The NullPointerException is triggered if the environment variable to locate the properties file
+ * is not found
+ */
+ LOGGER.info("Initializing the persistence with default configurations...");
+ }
+
+ emf = Persistence.createEntityManagerFactory("model", persistenceConfiguration);
+ LOGGER.info("Initializing complete!");
threadLocal = new ThreadLocal();
}
@@ -76,4 +100,4 @@ public class EntityManagerHelper {
public static void commit() {
getEntityManager().getTransaction().commit();
}
-}
\ No newline at end of file
+}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
new file mode 100644
index 00000000..63afae60
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
@@ -0,0 +1,17 @@
+package org.activeeon.morphemic.service;
+
+public class PathHelper {
+
+ private final static String propertiesFileName = "eu.morphemic.schedulingAbstractionLayer.properties";
+
+ // Environment Variable Name = MELODIC_CONFIG_DIR
+ private final static String propertiesFileEnvironmentVariableName = "melodic.config.dir";
+
+ public static String getPersistencePropertiesFilePath(){
+ String path = System.getProperty(propertiesFileEnvironmentVariableName);
+ if(path.endsWith("/")){
+ path = path.substring(0, path.length() - 1);
+ }
+ return path+"/"+propertiesFileName;
+ }
+}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
new file mode 100644
index 00000000..70565d31
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
@@ -0,0 +1,55 @@
+package org.activeeon.morphemic.service;
+
+import org.apache.commons.configuration2.Configuration;
+import org.apache.commons.configuration2.PropertiesConfiguration;
+import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
+import org.apache.commons.configuration2.builder.fluent.Parameters;
+import org.apache.commons.configuration2.builder.fluent.PropertiesBuilderParameters;
+import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
+import org.apache.commons.configuration2.convert.ListDelimiterHandler;
+import org.apache.commons.configuration2.ex.ConfigurationException;
+import org.apache.log4j.Logger;
+
+import java.util.HashMap;
+import java.util.Map;
+
+public class PersistenceConfiguration {
+
+ private static final ListDelimiterHandler DELIMITER = new DefaultListDelimiterHandler(';');
+
+ private static final Logger LOGGER = Logger.getLogger(PersistenceConfiguration.class);
+
+ public static String PERSISTENCE_URL = "sal.persistence.url";
+
+ public static String PERSISTENCE_USERNAME = "sal.persistence.username";
+
+ public static String PERSISTENCE_PASSWORD = "sal.persistence.password";
+
+ public static Configuration loadPersistenceConfiguration () throws ConfigurationException {
+ String PERSISTENCE_PROPERTIES_FILE_PATH = PathHelper.getPersistencePropertiesFilePath();
+ PropertiesBuilderParameters propertyParameters = new Parameters().properties();
+ propertyParameters.setPath(PERSISTENCE_PROPERTIES_FILE_PATH);
+ propertyParameters.setThrowExceptionOnMissing(true);
+ propertyParameters.setListDelimiterHandler(DELIMITER);
+
+ FileBasedConfigurationBuilder builder = new FileBasedConfigurationBuilder<>(PropertiesConfiguration.class);
+
+ builder.configure(propertyParameters);
+
+ LOGGER.debug("Persistence configuration loaded");
+
+ return builder.getConfiguration();
+ }
+
+ public static Map getAllPersistenceConfigurationPropertiesAsMap() throws ConfigurationException {
+ Map persistenceProperties = new HashMap<>();
+
+ Configuration persistenceConfiguration = loadPersistenceConfiguration();
+
+ persistenceProperties.put(PERSISTENCE_URL, persistenceConfiguration.getString(PERSISTENCE_URL));
+ persistenceProperties.put(PERSISTENCE_USERNAME, persistenceConfiguration.getString(PERSISTENCE_USERNAME));
+ persistenceProperties.put(PERSISTENCE_PASSWORD, persistenceConfiguration.getString(PERSISTENCE_PASSWORD));
+
+ return persistenceProperties;
+ }
+}
--
GitLab
From 57edb880e5c9403c181ace08e0004f4d4036b023 Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Tue, 20 Apr 2021 11:53:52 +0300
Subject: [PATCH 16/25] date format fix
---
morphemic-datasetmaker/morphemic/dataset/__init__.py | 6 +++++-
1 file changed, 5 insertions(+), 1 deletion(-)
diff --git a/morphemic-datasetmaker/morphemic/dataset/__init__.py b/morphemic-datasetmaker/morphemic/dataset/__init__.py
index db2c96f2..b19e48b3 100644
--- a/morphemic-datasetmaker/morphemic/dataset/__init__.py
+++ b/morphemic-datasetmaker/morphemic/dataset/__init__.py
@@ -10,7 +10,11 @@ class Row():
self.features = features
if "time" in self.features:
time_str = self.features["time"]
- _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ _obj = None
+ try:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ except:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%SZ')
self.features["time"] = int(_obj.timestamp())
if 'application' in metricsname:
metricsname.remove('application')
--
GitLab
From 0ed72d0c2018a1455c47fe1cedbfe53ff2989111 Mon Sep 17 00:00:00 2001
From: Ali Fahs
Date: Tue, 20 Apr 2021 09:31:41 +0000
Subject: [PATCH 17/25] Add getLengthOfNodeCandidates() and
getListOfActiveVMs() endpoints to PAGateway
---
.../org/activeeon/morphemic/PAGateway.java | 20 +++++++++++++++++++
.../deployment/PAResourceManagerGateway.java | 10 ++++++++++
2 files changed, 30 insertions(+)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 91f8ef1f..2aae7e16 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -11,6 +11,7 @@ import org.apache.commons.lang3.Validate;
import org.apache.log4j.Logger;
import org.json.JSONArray;
import org.json.JSONObject;
+import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent;
import org.ow2.proactive.resourcemanager.exception.RMException;
import org.ow2.proactive.scheduler.common.exception.NotConnectedException;
import org.ow2.proactive.scheduler.common.exception.UserException;
@@ -312,6 +313,25 @@ public class PAGateway {
return filteredNodeCandidates;
}
+ /**
+ * This function returns the number of available node candidates according to the added clouds
+ * @return the number of available node candidates
+ */
+ public int getLengthOfNodeCandidates() {
+ List allNodeCandidates = EntityManagerHelper.createQuery("SELECT nc FROM NodeCandidate nc",
+ NodeCandidate.class).getResultList();
+ return allNodeCandidates.size();
+ }
+
+ /**
+ * This function returns a list of available VMs
+ * @return rmNodeEvents a list of available Nodes and their associate parameters
+ */
+ public List getListOfActiveVMs() throws NotConnectedException, PermissionRestException {
+ List rmNodeEvents = resourceManagerGateway.getListOfNodesEvents();
+ return rmNodeEvents;
+ }
+
/**
* Define a node source in PA server related to a deployment information
* @param nodeSourceName A valid and unique node source name
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
index 883866de..7ae0023f 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
@@ -5,6 +5,7 @@ import org.apache.http.conn.ConnectTimeoutException;
import org.apache.log4j.Logger;
import org.ow2.proactive.resourcemanager.common.NSState;
import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent;
+import org.ow2.proactive.resourcemanager.common.event.RMNodeSourceEvent;
import org.ow2.proactive.resourcemanager.common.event.dto.RMStateFull;
import org.ow2.proactive.resourcemanager.exception.RMException;
import org.ow2.proactive.resourcemanager.exception.RMNodeException;
@@ -86,6 +87,15 @@ public class PAResourceManagerGateway {
rmRestInterface = RMConnectionHelper.init(paURL);
}
+ /**
+ * Get the available VMs at the proactive server
+ * @return rmNodeEvents the list of the available VMs
+ */
+ public List getListOfNodesEvents() throws NotConnectedException, PermissionRestException {
+ RMStateFull rmStateFull = rmRestInterface.getRMStateFull(RMConnectionHelper.getSessionId());
+ List rmNodeEvents = rmStateFull.getNodesEvents();
+ return rmNodeEvents;
+ }
/**
* Connect to the ProActive server
--
GitLab
From c7b4401de85040f1578463cfdca0d395e5225e6a Mon Sep 17 00:00:00 2001
From: mklkun
Date: Thu, 22 Apr 2021 14:37:39 +0200
Subject: [PATCH 18/25] Fix persistence configuration map's creation
---
.../morphemic/service/PersistenceConfiguration.java | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
index 70565d31..e8231b25 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
@@ -42,13 +42,17 @@ public class PersistenceConfiguration {
}
public static Map getAllPersistenceConfigurationPropertiesAsMap() throws ConfigurationException {
+ final String JAVAX_URL_PROP = "javax.persistence.jdbc.url";
+ final String JAVAX_USERNAME_PROP = "javax.persistence.jdbc.user";
+ final String JAVAX_PASSWORD_PROP = "javax.persistence.jdbc.password";
+
Map persistenceProperties = new HashMap<>();
Configuration persistenceConfiguration = loadPersistenceConfiguration();
- persistenceProperties.put(PERSISTENCE_URL, persistenceConfiguration.getString(PERSISTENCE_URL));
- persistenceProperties.put(PERSISTENCE_USERNAME, persistenceConfiguration.getString(PERSISTENCE_USERNAME));
- persistenceProperties.put(PERSISTENCE_PASSWORD, persistenceConfiguration.getString(PERSISTENCE_PASSWORD));
+ persistenceProperties.put(JAVAX_URL_PROP, persistenceConfiguration.getString(PERSISTENCE_URL));
+ persistenceProperties.put(JAVAX_USERNAME_PROP, persistenceConfiguration.getString(PERSISTENCE_USERNAME));
+ persistenceProperties.put(JAVAX_PASSWORD_PROP, persistenceConfiguration.getString(PERSISTENCE_PASSWORD));
return persistenceProperties;
}
--
GitLab
From 4edfb010586b9d684532cb045ca3e2829f270edd Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Fri, 23 Apr 2021 10:57:57 +0300
Subject: [PATCH 19/25] code optimization
---
morphemic-persistent-storage/database/inputapi/src/app.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/morphemic-persistent-storage/database/inputapi/src/app.py b/morphemic-persistent-storage/database/inputapi/src/app.py
index 01a151a4..3e7755b1 100644
--- a/morphemic-persistent-storage/database/inputapi/src/app.py
+++ b/morphemic-persistent-storage/database/inputapi/src/app.py
@@ -420,7 +420,7 @@ class Ingestor(Thread):
fields = json.loads(content)
except Exception as e:
print("Cannot decode json")
- print("content", content)
+ #print("content", content)
return False
# self.tolerance_manager.addTime(fields["application"], fields["timestamp"])
application = fields[metric_name_field_application]
@@ -468,7 +468,7 @@ class Ingestor(Thread):
except Exception as e:
print("An Error occur while inserting data point")
print(e)
- print("content", point)
+ #print("content", point)
return False
--
GitLab
From 4d7804e0220e26302f396d88685a42abc40622fc Mon Sep 17 00:00:00 2001
From: Jean-Didier Totow
Date: Fri, 23 Apr 2021 11:01:40 +0300
Subject: [PATCH 20/25] datasetlib bug fix
---
morphemic-datasetmaker/morphemic/dataset/__init__.py | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/morphemic-datasetmaker/morphemic/dataset/__init__.py b/morphemic-datasetmaker/morphemic/dataset/__init__.py
index db2c96f2..a8a477df 100644
--- a/morphemic-datasetmaker/morphemic/dataset/__init__.py
+++ b/morphemic-datasetmaker/morphemic/dataset/__init__.py
@@ -10,7 +10,11 @@ class Row():
self.features = features
if "time" in self.features:
time_str = self.features["time"]
- _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ _obj = None
+ try:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ except:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%SZ')
self.features["time"] = int(_obj.timestamp())
if 'application' in metricsname:
metricsname.remove('application')
@@ -18,6 +22,7 @@ class Row():
if not field_name in self.features:
self.features[field_name] = None
+
def getTime(self):
if "time" in self.features:
return self.features["time"]
--
GitLab
From 13ad06db389618685fa73dc63b786fb3c54c91e1 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Wed, 28 Apr 2021 15:09:18 +0200
Subject: [PATCH 21/25] Exclude some libraries in pom file
---
scheduling-abstraction-layer/pom.xml | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/scheduling-abstraction-layer/pom.xml b/scheduling-abstraction-layer/pom.xml
index c1337e42..d5e12561 100644
--- a/scheduling-abstraction-layer/pom.xml
+++ b/scheduling-abstraction-layer/pom.xml
@@ -23,12 +23,32 @@
rest-smartproxy
11.1.0-SNAPSHOT
compile
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
org.ow2.proactive_grid_cloud_portal
rest-api
11.1.0-SNAPSHOT
compile
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.springframework
+ spring-expression
+
+
+ org.springframework
+ spring-core
+
+
commons-beanutils
--
GitLab
From f7e61165552645075f5dd839213bf907ce4b6a13 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Wed, 28 Apr 2021 23:16:22 +0200
Subject: [PATCH 22/25] Fix persistence configuration pulling in SAL
---
.../main/java/org/activeeon/morphemic/service/PathHelper.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
index 63afae60..f2b72ab5 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
@@ -5,10 +5,10 @@ public class PathHelper {
private final static String propertiesFileName = "eu.morphemic.schedulingAbstractionLayer.properties";
// Environment Variable Name = MELODIC_CONFIG_DIR
- private final static String propertiesFileEnvironmentVariableName = "melodic.config.dir";
+ private final static String propertiesFileEnvironmentVariableName = "MELODIC_CONFIG_DIR";
public static String getPersistencePropertiesFilePath(){
- String path = System.getProperty(propertiesFileEnvironmentVariableName);
+ String path = System.getenv(propertiesFileEnvironmentVariableName);
if(path.endsWith("/")){
path = path.substring(0, path.length() - 1);
}
--
GitLab
From d3811b2036b14b9657b2ac817bbb938aa25301c1 Mon Sep 17 00:00:00 2001
From: mklkun
Date: Tue, 4 May 2021 19:47:31 +0200
Subject: [PATCH 23/25] Add a get all cloud images endpoint and fix javadoc
---
.../org/activeeon/morphemic/PAGateway.java | 33 ++++++++++++++++
.../application/deployment/PAFactory.java | 1 +
.../deployment/PAResourceManagerGateway.java | 5 ++-
.../org/activeeon/morphemic/model/Job.java | 1 +
.../activeeon/morphemic/model/NodeType.java | 38 -------------------
.../morphemic/service/RMConnectionHelper.java | 1 -
.../service/SchedulerConnectionHelper.java | 3 +-
7 files changed, 40 insertions(+), 42 deletions(-)
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 2aae7e16..f02b1d59 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -35,6 +35,7 @@ import java.security.KeyException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
+import java.util.stream.IntStream;
public class PAGateway {
@@ -80,6 +81,7 @@ public class PAGateway {
/**
* Disconnect from the ProActive server
+ * @throws NotConnectedException In case the user is not connected
*/
public void disconnect() throws NotConnectedException {
LOGGER.debug("Disconnecting from RM...");
@@ -323,9 +325,29 @@ public class PAGateway {
return allNodeCandidates.size();
}
+ /**
+ * This function returns the list of all available images related to a registered cloud
+ * @param cloudID A valid cloud identifier
+ * @return A list of available images
+ */
+ public List getAllCloudImages(String cloudID) {
+ List filteredImages = new LinkedList<>();
+ List allImages = EntityManagerHelper.createQuery("SELECT img FROM Image img",
+ Image.class).getResultList();
+ PACloud paCloud = EntityManagerHelper.find(PACloud.class, cloudID);
+ JSONArray imagesArray = connectorIaasGateway.getImages(paCloud.getDummyInfrastructureName());
+ List imagesIDs = IntStream.range(0, imagesArray.length())
+ .mapToObj(imagesArray::get)
+ .map(blaBla -> ((JSONObject)blaBla).optString("id")).collect(Collectors.toList());
+ allImages.stream().filter(blaTest -> imagesIDs.contains(blaTest.getId())).forEach(filteredImages::add);
+ return filteredImages;
+ }
+
/**
* This function returns a list of available VMs
* @return rmNodeEvents a list of available Nodes and their associate parameters
+ * @throws NotConnectedException In case the user is not connected
+ * @throws PermissionRestException In case the user does not have valid permissions
*/
public List getListOfActiveVMs() throws NotConnectedException, PermissionRestException {
List rmNodeEvents = resourceManagerGateway.getListOfNodesEvents();
@@ -390,6 +412,7 @@ public class PAGateway {
/**
* Add an EMS deployment to a defined job
+ * @param nodeNames Names of the nodes to which to add EMS deployment
* @param authorizationBearer The authorization bearer used by upperware's components to authenticate with each other. Needed by the EMS.
* @return return 0 if the deployment task is properly added.
*/
@@ -1325,6 +1348,16 @@ public class PAGateway {
return(submittedJobId);
}
+ /**
+ * Stop the deployed job
+ * @param jobId A deployed job identifier
+ * @return The submitted stopping job id
+ */
+ public long stopJob(String jobId) {
+ //TODO: Submit stopping scripts for all apps
+ return 0L;
+ }
+
/**
* Get a ProActive job state
* @param jobId A job ID
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
index dba3f4e4..0b81fb42 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
@@ -180,6 +180,7 @@ public class PAFactory {
* @param scriptFileName The script implementation file name
* @param parameters The selection script parameters
* @return A ProActive SelectionScript instance
+ * @throws IOException In case an IOException is thrown
*/
public static SelectionScript createGroovySelectionScript(String scriptFileName, String[] parameters) throws IOException {
SelectionScript selectionScript = null;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
index 7ae0023f..b56d4d44 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
@@ -90,6 +90,8 @@ public class PAResourceManagerGateway {
/**
* Get the available VMs at the proactive server
* @return rmNodeEvents the list of the available VMs
+ * @throws NotConnectedException In case the user is not connected
+ * @throws PermissionRestException In case the user does not have valid permissions
*/
public List getListOfNodesEvents() throws NotConnectedException, PermissionRestException {
RMStateFull rmStateFull = rmRestInterface.getRMStateFull(RMConnectionHelper.getSessionId());
@@ -104,7 +106,6 @@ public class PAResourceManagerGateway {
* @throws LoginException In case the login is not valid
* @throws KeyException In case the password is not valid
* @throws RMException In case an error happens in the RM
- * @throws NotConnectedException In case the session id is invalid
*/
public void connect(String username, String password) throws LoginException, KeyException, RMException {
RMConnectionHelper.connect(username,password);
@@ -194,6 +195,8 @@ public class PAResourceManagerGateway {
* @param all When true, the search return nodes which contain all tags;
* when false, the search return nodes which contain any tag among the list tags.
* @return the set of urls which match the search condition
+ * @throws NotConnectedException In case the user is not connected
+ * @throws RestException In case a Rest exception is thrown
*/
public List searchNodes(List tags, boolean all) throws NotConnectedException, RestException {
LOGGER.debug("Search for nodes ...");
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
index 172ccae4..be83b1b4 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
@@ -46,6 +46,7 @@ public class Job implements Serializable {
* Transform a job into JSON format
*
* @return the JSON representation of the job
+ * @throws IOException In case an IOException is thrown
*/
public String getJobInJson() throws IOException{
ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
index f9ff1490..c3899c47 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
@@ -18,8 +18,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #IAAS_VALUE
- * @generated
- * @ordered
*/
IAAS(0, "IAAS", "IAAS"),
@@ -28,8 +26,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #PAAS_VALUE
- * @generated
- * @ordered
*/
PAAS(1, "PAAS", "PAAS"),
@@ -38,8 +34,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #FAAS_VALUE
- * @generated
- * @ordered
*/
FAAS(2, "FAAS", "FAAS"),
@@ -48,16 +42,12 @@ public enum NodeType implements Enumerator {
*
*
* @see #BYON_VALUE
- * @generated
- * @ordered
*/
BYON(3, "BYON", "BYON"), /**
* The 'SIMULATION' literal object.
*
*
* @see #SIMULATION_VALUE
- * @generated
- * @ordered
*/
SIMULATION(4, "SIMULATION", "SIMULATION");
@@ -70,9 +60,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #IAAS
- * @model
- * @generated
- * @ordered
*/
public static final int IAAS_VALUE = 0;
@@ -85,9 +72,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #PAAS
- * @model
- * @generated
- * @ordered
*/
public static final int PAAS_VALUE = 1;
@@ -100,9 +84,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #FAAS
- * @model
- * @generated
- * @ordered
*/
public static final int FAAS_VALUE = 2;
@@ -115,9 +96,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #BYON
- * @model
- * @generated
- * @ordered
*/
public static final int BYON_VALUE = 3;
@@ -130,9 +108,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #SIMULATION
- * @model
- * @generated
- * @ordered
*/
public static final int SIMULATION_VALUE = 4;
@@ -140,7 +115,6 @@ public enum NodeType implements Enumerator {
* An array of all the 'Node Type' enumerators.
*
*
- * @generated
*/
private static final NodeType[] VALUES_ARRAY =
new NodeType[] {
@@ -155,7 +129,6 @@ public enum NodeType implements Enumerator {
* A public read-only list of all the 'Node Type' enumerators.
*
*
- * @generated
*/
public static final List VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));
@@ -165,7 +138,6 @@ public enum NodeType implements Enumerator {
*
* @param literal the literal.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType get(String literal) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
@@ -183,7 +155,6 @@ public enum NodeType implements Enumerator {
*
* @param name the name.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType getByName(String name) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
@@ -201,7 +172,6 @@ public enum NodeType implements Enumerator {
*
* @param value the integer value.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType get(int value) {
switch (value) {
@@ -217,21 +187,18 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
private final int value;
/**
*
*
- * @generated
*/
private final String name;
/**
*
*
- * @generated
*/
private final String literal;
@@ -239,7 +206,6 @@ public enum NodeType implements Enumerator {
* Only this class can construct instances.
*
*
- * @generated
*/
private NodeType(int value, String name, String literal) {
this.value = value;
@@ -250,7 +216,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public int getValue() {
return value;
@@ -259,7 +224,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public String getName() {
return name;
@@ -268,7 +232,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public String getLiteral() {
return literal;
@@ -278,7 +241,6 @@ public enum NodeType implements Enumerator {
* Returns the literal value of the enumerator, which is its string representation.
*
*
- * @generated
*/
@Override
public String toString() {
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
index c667f106..29ca69c4 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
@@ -55,7 +55,6 @@ public class RMConnectionHelper {
*
* @param username Username
* @param password Password
- * @return The user session ID
* @throws LoginException In case the login is not valid
* @throws KeyException In case the password is not valid
* @throws RMException In case an error happens in the RM
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
index 6efe9c5f..77ee0d9a 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
@@ -60,9 +60,8 @@ public class SchedulerConnectionHelper {
}
/**
- *
* Disconnect from the Scheduler
- *
+ * @return The disconnected Scheduler gateway
*/
public static synchronized RestSmartProxyImpl disconnect() {
try {
--
GitLab
From f4fbf4811a69937d239f3b2af6ac9749bfe9cb98 Mon Sep 17 00:00:00 2001
From: Andreas Tsagkaropoulos
Date: Wed, 5 May 2021 17:57:03 +0000
Subject: [PATCH 24/25] Amq message java library
---
.../brokerclient/templates/EventFields.java | 72 +++++++++++++++
.../brokerclient/templates/TopicNames.java | 91 +++++++++++++++++++
2 files changed, 163 insertions(+)
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java
create mode 100644 amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java
new file mode 100644
index 00000000..6859637e
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java
@@ -0,0 +1,72 @@
+package eu.melodic.event.brokerclient.templates;
+
+public class EventFields {
+ /**
+ * This event is used to send realtime metric values
+ */
+ public static class MetricEventFields{
+ public static final String timestamp = "timestamp";
+ public static final String metric_value = "metricValue";
+ public static final String refers_to = "refersTo";
+ public static final String cloud = "cloud";
+ public static final String provider = "provider";
+ }
+ /**
+ * This event is used to send predicted metric values
+ */
+ public static class PredictionMetricEventFields{
+ public static final String timestamp = "timestamp";
+ public static final String level = "level";
+ public static final String metric_value = "metricValue";
+ public static final String refers_to = "refersTo";
+ public static final String cloud = "cloud";
+ public static final String provider = "provider";
+ public static final String probability = "probability";
+ public static final String confidence_interval = "confidence_interval";
+ public static final String prediction_time = "predictionTime";
+ }
+ /**
+ * This event is used to send a list of metric values which should be predicted by forecasting methods. The fields which are included in this event reflect the fields which should be included in each JSON object (this event is a JSON Array consisting of multiple JSON objects)
+ */
+ public static class TranslatorToForecastingMethodsFieldsPerMetric{
+ public static final String metric = "metric";
+ public static final String level = "level";
+ public static final String publish_rate = "publish_rate";
+ }
+ /**
+ * This event is used to indicate that a (re)training of the forecasting method has been performed
+ */
+ public static class ForecastingMethodsToPredictionOrchestratorTrainingEventFields{
+ public static final String metrics = "metrics";
+ public static final String forecasting_method = "forecasting_method";
+ public static final String timestamp = "timestamp";
+ }
+ /**
+ * This event is used to send intermediate predictions generated by individual forecasting methods
+ */
+ public static class ForecastingMethodsToPredictionOrchestratorIntermediatePredictionsFields{
+ public static final String timestamp = "timestamp";
+ public static final String metric_value = "metricValue";
+ public static final String level = "level";
+ public static final String refers_to = "refersTo";
+ public static final String cloud = "cloud";
+ public static final String provider = "provider";
+ public static final String probability = "probability";
+ public static final String confidence_interval = "confidence_interval";
+ public static final String prediction_time = "predictionTime";
+ }
+ /**
+ * This event is used to initiate forecasting of one or more monitoring metrics
+ */
+ public static class PredictionOrchestratorToForecastingMethodsStartForecastingEventFields{
+ public static final String metrics = "metrics";
+ public static final String timestamp = "timestamp";
+ }
+ /**
+ * This event is used to stop forecasting of one or more monitoring metrics
+ */
+ public static class PredictionOrchestratorToForecastingMethodsStopForecastingEventFields{
+ public static final String metrics = "metrics";
+ public static final String timestamp = "timestamp";
+ }
+}
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java
new file mode 100644
index 00000000..94a3b664
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java
@@ -0,0 +1,91 @@
+package eu.melodic.event.brokerclient.templates;
+
+public class TopicNames {
+
+ /**
+ * The realtime values which are published for a particular metric, by the EMS
+ * @param metric_name The metric for which the monitoring values are desired
+ * @return A String value containing the topic which is used to publish the realtime values of a metric
+ */
+ public static String realtime_metric_values_topic (String metric_name){
+ return metric_name;
+ }
+
+ /**
+ * The final metric predictions which are produced by the Prediction Orchestrator
+ * @param predicted_metric_name The name of the metric for which predictions are published
+ * @return A String value containing the topic with the name of the metric
+ */
+ public static String final_metric_predictions_topic (String predicted_metric_name){
+ return "prediction."+predicted_metric_name;
+ }
+
+ /**
+ * The topic which is used by the translator to inform forecasting methods that a metric should be predicted
+ */
+ public static String translator_to_forecasting_methods_topic = "metrics_to_predict";
+ /**
+ * The topic which is used by forecasting methods to inform the prediction orchestrator that a (re)training of a prediction method has been performed
+ */
+ public static String forecasting_methods_to_prediction_orchestrator_training_topic = "training_models";
+
+
+ /**
+ * The topic which should be used to transmit intermediate forecasts for a particular metric by prediction methods
+ */
+ public static class ForecastingMethodsToPredictionOrchestratorPredictionsTopic{
+ public static String nbeats(String metric_name) {
+ return "intermediate_prediction.nbeats."+metric_name;
+ }
+ public static String es_hybrid(String metric_name) {
+ return "intermediate_prediction.eshybrid."+metric_name;
+ }
+ public static String arima(String metric_name){
+ return "intermediate_prediction.arima." +metric_name;
+ }
+ public static String tsetlin_machines(String metric_name) {
+ return "intermediate_prediction.tsetlinmachines."+metric_name;
+ }
+ public static String exponential_smoothing(String metric_name) {
+ return "intermediate_prediction.exponentialsmoothing."+metric_name;
+ }
+ public static String lstm(String metric_name) {
+ return "intermediate_prediction.lstm."+metric_name;
+ }
+ public static String gluon_machines(String metric_name) {
+ return "intermediate_prediction.gluonmachines."+metric_name;
+ }
+ public static String prophet(String metric_name){
+ return "intermediate_prediction.prophet."+metric_name;
+ }
+ }
+
+
+ /**
+ * The topic which is used to notify a forecasting method to start forecasting one or more metrics
+ */
+ public static class PredictionOrchestratorToForecastingMethodsStartForecastingTopic {
+ public static final String nbeats = "start_forecasting.nbeats";
+ public static final String es_hybrid = "start_forecasting.eshybrid";
+ public static final String arima = "start_forecasting.arima";
+ public static final String tsetlin_machines = "start_forecasting.tsetlinmachines";
+ public static final String exponential_smoothing = "start_forecasting.exponentialsmoothing";
+ public static final String lstm = "start_forecasting.lstm";
+ public static final String gluon_machines = "start_forecasting.gluonmachines";
+ public static final String prophet = "start_forecasting.prophet";
+ }
+
+ /**
+ * The topic which is used to notify a forecasting method to stop forecasting one or more metrics
+ */
+ public static class PredictionOrchestratorToForecastingMethodsStopForecastingTopic {
+ public static final String nbeats = "stop_forecasting.nbeats";
+ public static final String es_hybrid = "stop_forecasting.eshybrid";
+ public static final String arima = "stop_forecasting.arima";
+ public static final String tsetlin_machines = "stop_forecasting.tsetlinmachines";
+ public static final String exponential_smoothing = "stop_forecasting.exponentialsmoothing";
+ public static final String lstm = "stop_forecasting.lstm";
+ public static final String gluon_machines = "stop_forecasting.gluonmachines";
+ public static final String prophet = "stop_forecasting.prophet";
+ }
+}
--
GitLab
From 60907fe5ac4011c938484960318ef7cc1889f643 Mon Sep 17 00:00:00 2001
From: Fotis Paraskevopoulos
Date: Mon, 10 May 2021 16:34:10 +0300
Subject: [PATCH 25/25] Upgrading Python Library
---
amq-message-python-library/Event.py | 420 ++++++++++++++++++
.../MorphemicConnection.py | 6 +-
.../MorphemicListener.py | 29 ++
amq-message-python-library/Payloads.py | 10 +
amq-message-python-library/__init__.py | 5 +-
5 files changed, 464 insertions(+), 6 deletions(-)
create mode 100644 amq-message-python-library/Event.py
create mode 100644 amq-message-python-library/MorphemicListener.py
create mode 100644 amq-message-python-library/Payloads.py
diff --git a/amq-message-python-library/Event.py b/amq-message-python-library/Event.py
new file mode 100644
index 00000000..52dbc842
--- /dev/null
+++ b/amq-message-python-library/Event.py
@@ -0,0 +1,420 @@
+
+
+class Metric(enumerate):
+ """
+ [0] (current/detected) Metrics & SLOs Events Format:
+
+
+ This event is aggregated by EMS and it is persisted in InfluxDB. Moreover,
+ Prediction Orchestrator will subscribe and receive the current metrics in order to
+ evaluate the forecasting methods, according to the defined KPIs (e.g., MAPE)
+
+ * Topic: [metric_name]
+ > (e.g. MaxCPULoad)
+
+
+ {
+ "metricValue": 12.34,
+
+ "level": 1,
+
+ "timestamp": 143532341251,
+
+ "refersTo": "MySQL_12345",
+
+ "cloud": "AWS-Dublin",
+
+ "provider": "AWS"
+
+ }
+
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+ """
+ TIMESTAMP = "timestamp"
+ METRIC_VALUE = "metricValue"
+ REFERS_TO = "refersTo"
+ CLOUD = "cloud"
+ PROVIDER = "provider"
+
+
+
+class PredictionMetric(enumerate):
+
+ """
+ [1] Predicted Metrics & SLOs Events Format
+
+
+ This event is produced by the Prediction Orchestrator and reflects the final predicted value for a metric.
+
+ - Topic: prediction.[metric_name]
+ > (e.g. prediction.MaxCPULoad)
+
+
+ {
+ "metricValue": 12.34,
+
+ "level": 1,
+
+ "timestamp": 143532341251,
+
+ "probability": 0.98,
+
+    "confidence_interval": [8,15],
+
+ "predictionTime": 143532342,
+
+ "refersTo": "MySQL_12345",
+
+ "cloud": "AWS-Dublin",
+
+ "provider": "AWS"
+
+ }
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+ """
+
+ _match = "prediction."
+
+ METRICVALUE= "metricValue"
+ '''Predicted metric value'''
+ LEVEL= "level"
+ '''Level of VM where prediction occurred or refers'''
+ TIMESTAMP= "timestamp"
+ '''Prediction creation date/time from epoch'''
+ PROBABILITY= "probability"
+ '''Probability of the predicted metric value (range 0..1)'''
+ CONFIDENCE_INTERVAL= "confidence_interval"
+ '''the probability-confidence interval for the prediction'''
+ PREDICTION_TIME= "predictionTime"
+ '''This refers to time point in the imminent future (that is relative to the time
+ that is needed for reconfiguration) for which the predicted value is considered
+ valid/accurate (in UNIX Epoch)'''
+ REFERSTO= "refersTo"
+ '''The id of the application or component or (VM) host for which the prediction refers to'''
+ CLOUD= "cloud"
+ '''Cloud provider of the VM (with location)'''
+ PROVIDER= "provider"
+ '''Cloud provider name'''
+
+
+
+class MetricsToPredict(enumerate):
+
+ """
+ [2] Translator – to – Forecasting Methods/Prediction Orchestrator Events Format
+
+
+ This event is produced by the translator, to:
+
+    inform the Dataset Maker which metrics it should subscribe to in order to aggregate the appropriate training dataset in the time-series DB.
+ instruct each of the Forecasting methods to predict the values of one or more monitoring metrics
+ inform the Prediction Orchestrator for the metrics which will be forecasted
+
+ * Topic: metrics_to_predict
+
+
+ *Note:* This event could be communicated through Mule
+
+
+ [
+ {
+
+ "metric": "MaxCPULoad",
+
+ "level": 3,
+
+ "publish_rate": 60000,
+
+ },
+
+ {
+
+ "metric": "MinCPULoad",
+
+ "level": 3,
+
+ "publish_rate": 50000,
+
+ }
+
+ ]
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+ """
+
+ _match = "metrics_to_predict"
+
+ METRIC = "metric"
+ '''name of the metric to predict'''
+ LEVEL = "level"
+ '''Level of monitoring topology where this metric may be produced/found'''
+ PUBLISH_RATE = "publish_rate"
+ '''expected rate for datapoints regarding the specific metric (according to CAMEL)'''
+
+
+class TraningModels(enumerate):
+ """
+
+ [3] Forecasting Methods – to – Prediction Orchestrator Events Format
+
+
+ This event is produced by each of the Forecasting methods, to inform the
+ Prediction Orchestrator that the method has (re-)trained its model for one or more metrics.
+
+ * Topic: training_models
+
+
+ {
+
+    "metrics": ["MaxCPULoad","MinCPULoad"],
+
+ "forecasting_method": "ESHybrid",
+
+    "timestamp": 143532341251
+
+ }
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+ """
+ _match = "training_models"
+
+ METRICS = "metrics"
+ '''metrics for which a certain forecasting method has successfully trained or re-trained its model'''
+ FORECASTING_METHOD = "forecasting_method"
+ '''the method that is currently re-training its models'''
+ TIMESTAMP = "timestamp"
+ '''date/time of model(s) (re-)training'''
+
+
+class IntermediatePrediction(enumerate):
+ """
+
+ [4] Forecasting Methods – to – Prediction Orchestrator Events Format
+
+
+ This event is produced by each of the Forecasting methods, and is used by the Prediction Orchestrator to determine the final prediction value for the particular metric.
+
+
+ * Topic: intermediate_prediction.[forecasting_method].[metric_name]
+ * (e.g. intermediate_prediction.ESHybrid.MaxCPULoad)
+ * We note that any component will be able to subscribe to topics like:
+ * intermediate_prediction.*.MaxCPULoad → gets MaxCPULoad predictions produced by all forecasting methods or
+ * intermediate_prediction.ESHybrid.* → gets all metrics predictions from ESHybrid method
+ * We consider that each forecasting method publishes a static (but configurable) number m of predicted values (under the same timestamp) for time points into the future. These time points into the future are relevant to the reconfiguration time that it is needed (and can also be updated).
+ * For example if we configure m=5 predictions into the future and the reconfiguration time needed is TR=10 minutes, then at t0 a forecasting method publishes 5 events with the same timestamp and prediction times t0+10, t0+20, t0+30, t0+40, t0+50.
+
+
+
+ {
+ "metricValue": 12.34,
+
+ "level": 3,
+
+ "timestamp": 143532341251,
+
+ "probability": 0.98,
+
+    "confidence_interval": [8,15],
+
+ "predictionTime": 143532342,
+
+ "refersTo": "MySQL_12345",
+
+ "cloud": "AWS-Dublin",
+
+ "provider": "AWS"
+
+ }
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+ """
+
+ _match="intermediate_prediction."
+
+ METRICVALUE = "metricValue"
+ '''Predicted metric value (more than one such events will be produced for different time points into the future – this can be valuable to the Prediction Orchestrator in certain situations e.g., forecasting method is unreachable for a time period)'''
+
+ LEVEL = "level"
+ '''Level of VM where prediction occurred or refers'''
+
+ TIMESTAMP = "timestamp"
+ '''Prediction creation date/time from epoch'''
+
+ PROBABILITY = "probability"
+ '''Probability of the predicted metric value (range 0..1)'''
+
+ CONFIDENCE_INTERVAL = "confidence_interval"
+ '''the probability-confidence interval for the prediction'''
+
+ PREDICTION_TIME = "predictionTime"
+ '''This refers to time point in the imminent future (that is relative to the time that is needed for reconfiguration) for which the predicted value is considered valid/accurate (in UNIX Epoch)'''
+
+ REFERS_TO = "refersTo"
+ '''The id of the application or component or (VM) host for which the prediction refers to'''
+
+ CLOUD = "cloud"
+ '''Cloud provider of the VM (with location)'''
+
+ PROVIDER = "provider"
+ '''Cloud provider name'''
+
+
+
+class Prediction(enumerate):
+ """
+
+ [5] Prediction Orchestrator – to – Severity-based SLO Violation Detector Events Format
+
+
+ This event is used by the Prediction Orchestrator to inform the SLO Violation Detector about the current values of a metric, which can possibly lead to an SLO Violation detection.
+
+ * Topic: prediction.[metric_name]
+ * (e.g. prediction.MaxCPULoad)
+
+
+ {
+ "metricValue": 12.34,
+
+ "level": 1,
+
+ "timestamp": 143532341251,
+
+ "probability": 0.98,
+
+    "confidence_interval": [8,15],
+
+ "predictionTime": 143532342,
+
+ "refersTo": "MySQL_12345",
+
+ "cloud": "AWS-Dublin",
+
+ "provider": "AWS"
+
+ }
+
+
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+
+ """
+
+ _match = "prediction."
+
+ METRICVALUE = "metricValue"
+ '''Predicted metric value'''
+
+ LEVEL = "level"
+ '''Level of VM where prediction occurred or refers'''
+
+ TIMESTAMP = "timestamp"
+ '''Prediction creation date/time from epoch'''
+
+ PROBABILITY = "probability"
+ '''Probability of the predicted metric value (range 0..1)'''
+
+ CONFIDENCE_INTERVAL = "confidence_interval"
+ '''the probability-confidence interval for the prediction'''
+
+ PREDICTIONTIME = "predictionTime"
+ '''This refers to time point in the imminent future (that is relative to the time that is needed for reconfiguration) for which the predicted value is considered valid/accurate (in UNIX Epoch)'''
+
+ REFERSTO = "refersTo"
+ '''The id of the application or component or (VM) host for which the prediction refers to'''
+
+ CLOUD = "cloud"
+ '''Cloud provider of the VM (with location)'''
+
+ PROVIDER = "provider"
+ '''Cloud provider name'''
+
+
+class StopForecasting(enumerate):
+ """
+ [6] Prediction Orchestrator – to – Forecasting Methods Events Format
+
+
+ This event is used by the Prediction Orchestrator to instruct a forecasting method to stop producing predicted values for a selection of metrics.
+
+
+ * Topic: stop_forecasting.[forecasting_method]
+    * Each component that implements a specific forecasting method should subscribe to its relevant topic (e.g. the ES-Hybrid component should subscribe to the stop_forecasting.eshybrid topic)
+
+
+ {
+ "metrics": ["MaxCPULoad","MinCPULoad"],
+    "timestamp": 143532341251
+ }
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+
+ """
+
+ _match="stop_forecasting."
+
+ METRICS = "metrics"
+ '''metrics for which a certain method should stop producing predictions (because of poor results)'''
+ TIMESTAMP = "timestamp"
+ '''date/time of the command of the orchestrator'''
+
+
+class StartForecasting(enumerate):
+ """
+
+ [7] Prediction Orchestrator – to – Forecasting Methods Events Format
+
+ This event is used by the Prediction Orchestrator to instruct a forecasting method to start producing predicted values for a selection of metrics.
+
+
+ * Topic: start_forecasting.[forecasting_method]
+    * Each component that implements a specific forecasting method should subscribe to its relevant topic (e.g. the ES-Hybrid component should subscribe to the start_forecasting.eshybrid topic)
+ * We consider that each forecasting method should publish a static (but configurable) number m of predicted values (under the same timestamp) for time points into the future. These time points into the future are relevant to the reconfiguration time that it is needed (and can also be updated).
+ * For example if we configure m=5 predictions into the future and the reconfiguration time needed is TR=10 minutes, then at t0 a forecasting method publishes 5 events with the same timestamp and prediction times t0+10, t0+20, t0+30, t0+40, t0+50.
+
+
+
+
+ {
+ "metrics": ["MaxCPULoad","MinCPULoad"],
+
+ "timestamp": 143532341251,
+
+ "epoch_start": 143532341252,
+
+ "number_of_forward_predictions": 5,
+
+ "prediction_horizon": 600
+
+ }
+
+ https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
+
+
+ """
+
+ _match="start_forecasting."
+
+ METRICS = "metrics"
+ '''metrics for which a certain method should start producing predictions'''
+ TIMESTAMP = "timestamp"
+ '''date/time of the command of the orchestrator'''
+ EPOCH_START = "epoch_start"
+ '''this time refers to the start time after which all predictions will be considered (i.e. t0)'''
+ NUMBER_OF_FORWARD_PREDICTIONS = "number_of_forward_predictions"
+ ''' this is a number that indicates how many time points into the future do we need predictions for.'''
+ PREDICTION_HORIZON = "prediction_horizon"
+ '''This time equals to the time (in seconds) that is needed for the platform to implement an application reconfiguration (i.e. TR).'''
\ No newline at end of file
diff --git a/amq-message-python-library/MorphemicConnection.py b/amq-message-python-library/MorphemicConnection.py
index d67a179f..59f8dd0f 100644
--- a/amq-message-python-library/MorphemicConnection.py
+++ b/amq-message-python-library/MorphemicConnection.py
@@ -29,7 +29,7 @@ class Connection:
def subscribe(self,destination, id, ack='auto'):
- ref = next((item for item in self.subscriptions if item['id'] == id), None)
+ ref = next((item for item in self.subscriptions if item['destination'] == destination and item['id'] == id), None)
if not ref:
self.subscriptions.append(
@@ -46,8 +46,6 @@ class Connection:
def queue(self,destination, id, ack='auto'):
self.subscribe("/queue/%s" % destination,id,ack)
- def is_topic(self,headers, topic):
- return headers.get('destination') == '/topic/metrics_to_predict'
def unsubscribe(self, id):
if not self.conn:
@@ -65,9 +63,7 @@ class Connection:
return
self.conn.connect(self.username, self.password, wait=wait)
-
for s in self.subscriptions:
-
self.conn.subscribe(s['destination'], s['id'], s['ack'])
diff --git a/amq-message-python-library/MorphemicListener.py b/amq-message-python-library/MorphemicListener.py
new file mode 100644
index 00000000..1317bf76
--- /dev/null
+++ b/amq-message-python-library/MorphemicListener.py
@@ -0,0 +1,29 @@
+
+from stomp.listener import ConnectionListener
+import logging
+import json
+
+class MorphemicListener(ConnectionListener):
+
+ def is_topic(self,headers, event):
+ if not hasattr(event,"_match"):
+ return False
+ match = getattr(event,'_match')
+ return headers.get('destination').startswith(match)
+
+ def _unknown_message(self,body):
+ logging.debug("Unknown message %s ",body)
+ pass
+
+
+ def on_message(self, headers, body):
+
+ logging.debug("Headers %s",headers)
+ logging.debug(" %s",body)
+
+ func_name='on_%s' % headers.get('destination').replace('/topic/','')
+ if hasattr(self,func_name):
+ func = getattr(self, func_name)
+ func(json.loads(body))
+ else:
+ self._unknown_message(body)
diff --git a/amq-message-python-library/Payloads.py b/amq-message-python-library/Payloads.py
new file mode 100644
index 00000000..5de1adc8
--- /dev/null
+++ b/amq-message-python-library/Payloads.py
@@ -0,0 +1,10 @@
+
+class MetricsToPredict:
+
+
+ def load(self,body):
+ self.metrics = body["metrics"]
+ self.timestamp = body["timestamp"]
+ self.epoch_start = body["epoch_start"]
+ self.number_of_forward_predictions = body["number_of_forward_predictions"]
+ self.prediction_horizon = body["prediction_horizon"]
diff --git a/amq-message-python-library/__init__.py b/amq-message-python-library/__init__.py
index 21f52880..45fe25b1 100644
--- a/amq-message-python-library/__init__.py
+++ b/amq-message-python-library/__init__.py
@@ -1,2 +1,5 @@
-from . import MorphemicConnection as morphemic
\ No newline at end of file
+from . import MorphemicConnection as morphemic
+from . import MorphemicListener as listener
+from . import Event as events
+from . import Payloads as payloads
\ No newline at end of file
--
GitLab