diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index eb762bcea3c14d65e1c39b11a02d734eb3dd88a6..4f2ac7250413fc06ba67e59495fa49406777d19f 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,9 +2,14 @@
variables:
MAVEN_IMAGE: "maven:3.5.2-jdk-8"
-
LOCAL_REPO: "127.0.0.1:5000"
+ DOCKER_REPO: "gitlab.ow2.org:4567"
+ DOCKER_DIND_IMAGE: "docker:19.03.1"
+ DOCKER_DIND_SERVICE: "$DOCKER_DIND_IMAGE-dind"
+ DOCKER_DRIVER: overlay
+ DOCKER_TLS_CERTDIR: "/certs"
+
SCHEDULING_ABSTRACTION_LAYER_CLI: "mvn -DskipTests --batch-mode -f scheduling-abstraction-layer/pom.xml"
cache:
@@ -13,7 +18,7 @@ cache:
before_script:
- echo '=========================================================================='
- - echo $SCHEDULING_ABSTRACTION_LAYER_CLI
+ - echo $SCHEDULING_ABSTRACTION_LAYER_CLI
- echo '=========================================================================='
- mkdir -p $HOME/.m2
- echo '
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- NOTE(review): the XML tags of this pom.xml (and its "diff --git" header) were
+     stripped when this patch was extracted; the element names below are reconstructed
+     from the surviving values and must be checked against the original file. -->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <parent>
+        <groupId>org.springframework.boot</groupId>
+        <artifactId>spring-boot-starter-parent</artifactId>
+        <version>2.4.4</version>
+    </parent>
+
+    <artifactId>amq-message-java-library</artifactId>
+    <name>AMQ message Java library</name>
+    <groupId>gr.ntua.imu.morphemic</groupId>
+    <version>1.0.0</version>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-activemq</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.activemq</groupId>
+            <artifactId>activemq-broker</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>3.8.1</version>
+        </dependency>
+    </dependencies>
+
+    <distributionManagement>
+        <snapshotRepository>
+            <id>eu.7bulls</id>
+            <name>Melodic 7bulls repository</name>
+            <url>https://nexus.7bulls.eu:8443/repository/maven-snapshots/</url>
+        </snapshotRepository>
+        <repository>
+            <id>eu.7bulls</id>
+            <name>Melodic 7bulls repository</name>
+            <url>https://nexus.7bulls.eu:8443/repository/maven-releases/</url>
+        </repository>
+    </distributionManagement>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>3.2.4</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java
new file mode 100644
index 0000000000000000000000000000000000000000..373c9f6396c4b1e1d5d940fdbee5e2f4d1add6f3
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClient.java
@@ -0,0 +1,345 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient;
+
+import eu.melodic.event.brokerclient.event.EventMap;
+import eu.melodic.event.brokerclient.properties.BrokerClientProperties;
+import java.io.Serializable;
+import java.util.*;
+import javax.jms.*;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.activemq.ActiveMQConnection;
+import org.apache.activemq.ActiveMQConnectionFactory;
+import org.apache.activemq.ActiveMQSslConnectionFactory;
+import org.apache.activemq.advisory.DestinationSource;
+import org.apache.activemq.command.ActiveMQQueue;
+import org.apache.activemq.command.ActiveMQTempQueue;
+import org.apache.activemq.command.ActiveMQTempTopic;
+import org.apache.activemq.command.ActiveMQTopic;
+import org.apache.commons.lang3.StringUtils;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Component;
+
+@Slf4j
+@Component
+public class BrokerClient {
+
+ @Autowired
+ private BrokerClientProperties properties;
+ private Connection connection;
+ private Session session;
+ private HashMap listeners = new HashMap<>();
+
+    /** No-arg constructor; configuration is resolved lazily via checkProperties(). */
+    public BrokerClient() {
+    }
+
+    /** Creates a client with an explicit, pre-built configuration object. */
+    public BrokerClient(BrokerClientProperties bcp) {
+        properties = bcp;
+    }
+
+    /** Creates a client from raw Properties (keys as read by BrokerClientProperties). */
+    public BrokerClient(Properties p) {
+        properties = new BrokerClientProperties(p);
+    }
+
+ // ------------------------------------------------------------------------
+
+    /**
+     * Builds a client whose configuration is loaded from a properties file on disk.
+     *
+     * @param broker_properties_configuration_file_location path of the properties file
+     * @throws java.io.IOException if the file cannot be read
+     * @throws JMSException declared for API symmetry; nothing here connects to the broker yet
+     */
+    public static BrokerClient newClient(String broker_properties_configuration_file_location) throws java.io.IOException, JMSException {
+        log.info("BrokerClient: Initializing...");
+
+        /*
+        // get properties file
+        String configDir = System.getenv("MELODIC_CONFIG_DIR");
+        if (configDir == null || configDir.trim().isEmpty()) configDir = ".";
+        log.info("BrokerClient: config-dir: {}", configDir);
+        String configPropFile = configDir + "/" + "eu.melodic.event.brokerclient.properties";
+        log.info("BrokerClient: config-file: {}", configPropFile);
+        */
+
+        // load properties from the given file (classpath loading above was abandoned)
+        Properties p = new Properties();
+        //ClassLoader loader = Thread.currentThread().getContextClassLoader();
+        //try (java.io.InputStream in = loader.getClass().getResourceAsStream(configPropFile)) { p.load(in); }
+        try (java.io.InputStream in = new java.io.FileInputStream(broker_properties_configuration_file_location)) {
+            p.load(in);
+        }
+        // NOTE(review): this logs every property value, including any password present in the file
+        log.info("BrokerClient: config-properties: {}", p);
+
+        // initialize broker client
+        BrokerClient client = new BrokerClient(p);
+        log.info("BrokerClient: Configuration:\n{}", client.properties);
+
+        return client;
+    }
+
+    /**
+     * Same as {@link #newClient(String)} but overrides the file's credentials when BOTH
+     * username and password are non-null (a lone username is ignored).
+     */
+    public static BrokerClient newClient(String username, String password, String broker_properties_configuration_file_path) throws java.io.IOException, JMSException {
+        BrokerClient client = newClient(broker_properties_configuration_file_path);
+        if (username!=null && password!=null) {
+            client.getClientProperties().setBrokerUsername(username);
+            client.getClientProperties().setBrokerPassword(password);
+        }
+        return client;
+    }
+
+    /** Builds a client with built-in defaults (see BrokerClientProperties no-arg constructor). */
+    public static BrokerClient newClient() {
+        return new BrokerClient();
+    }
+
+ // ------------------------------------------------------------------------
+
+    /** Returns the client configuration, creating a default one if none was injected. */
+    public BrokerClientProperties getClientProperties() {
+        checkProperties();
+        return properties;
+    }
+
+    /** Lazily falls back to default configuration when no properties were provided/injected. */
+    protected void checkProperties() {
+        if (properties==null) {
+            //use defaults
+            properties = new BrokerClientProperties();
+        }
+    }
+
+ // ------------------------------------------------------------------------
+
+    /**
+     * Lists all broker destinations (queues, topics, temporary queues/topics) as strings
+     * prefixed with their kind. Opens a connection if none is cached; the connection is
+     * closed afterwards unless 'preserveConnection' is set or a session already existed.
+     *
+     * FIX: the generic type parameters (stripped from the patch) are restored — iterating
+     * a raw Set with 'for (ActiveMQQueue q : queues)' does not compile.
+     *
+     * @param connectionString broker URL used when no session is currently open
+     * @return destination descriptions, e.g. "QUEUE foo", "TOPIC bar"
+     * @throws JMSException on connection or advisory lookup failure
+     */
+    public synchronized Set<String> getDestinationNames(String connectionString) throws JMSException {
+        // open or reuse connection
+        checkProperties();
+        boolean _closeConn = false;
+        if (session == null) {
+            openConnection(connectionString);
+            _closeConn = !properties.isPreserveConnection();
+        }
+
+        // Query destinations via the ActiveMQ advisory mechanism (requires an ActiveMQ connection)
+        log.info("BrokerClient.getDestinationNames(): Getting destinations: connection={}, username={}", connectionString, properties.getBrokerUsername());
+        ActiveMQConnection conn = (ActiveMQConnection) connection;
+        DestinationSource ds = conn.getDestinationSource();
+        Set<ActiveMQQueue> queues = ds.getQueues();
+        Set<ActiveMQTopic> topics = ds.getTopics();
+        Set<ActiveMQTempQueue> tempQueues = ds.getTemporaryQueues();
+        Set<ActiveMQTempTopic> tempTopics = ds.getTemporaryTopics();
+        log.info("BrokerClient.getDestinationNames(): Getting destinations: done");
+
+        // Render destination names with a kind prefix
+        HashSet<String> destinationNames = new HashSet<>();
+        for (ActiveMQQueue q : queues) destinationNames.add("QUEUE " + q.getQueueName());
+        for (ActiveMQTopic t : topics) destinationNames.add("TOPIC " + t.getTopicName());
+        for (ActiveMQTempQueue tq : tempQueues) destinationNames.add("Temp QUEUE " + tq.getQueueName());
+        for (ActiveMQTempTopic tt : tempTopics) destinationNames.add("Temp TOPIC " + tt.getTopicName());
+
+        // close connection only if this call opened it and preservation is off
+        if (_closeConn) {
+            closeConnection();
+        }
+
+        return destinationNames;
+    }
+
+ // ------------------------------------------------------------------------
+
+    /** Publishes a map-based event (wrapped in an EventMap) to the given topic. */
+    public synchronized void publishEvent(String connectionString, String destinationName, Map eventMap) throws JMSException {
+        _publishEvent(connectionString, destinationName, new EventMap(eventMap));
+    }
+
+    /** Publishes without forcing the connection to stay open. */
+    protected synchronized void _publishEvent(String connectionString, String destinationName, Serializable event) throws JMSException {
+        _publishEvent(connectionString,destinationName,event,false);
+    }
+    /**
+     * Publishes 'event.toString()' as a non-persistent TextMessage to topic 'destinationName'.
+     * Opens a connection when none is cached; closes it afterwards unless the session
+     * pre-existed, 'preserveConnection' is configured, or the caller demanded persistence.
+     *
+     * @param persistent_connection_demanded when true, never close the connection here
+     */
+    protected synchronized void _publishEvent(String connectionString, String destinationName, Serializable event, boolean persistent_connection_demanded) throws JMSException {
+        // open or reuse connection
+        checkProperties();
+        boolean _closeConn = false;
+        if (session==null) {
+            openConnection(connectionString);
+            _closeConn = ! properties.isPreserveConnection();
+        }
+        if (persistent_connection_demanded){
+            _closeConn = false;
+        }
+
+        // Create the destination (Topic or Queue)
+        //Destination destination = session.createQueue( destinationName );
+        Destination destination = session.createTopic(destinationName);
+
+        // Create a MessageProducer from the Session to the Topic or Queue
+        MessageProducer producer = session.createProducer(destination);
+        producer.setDeliveryMode(javax.jms.DeliveryMode.NON_PERSISTENT);
+
+        // Create a messages
+        // NOTE(review): events are serialized via toString(), not as ObjectMessage;
+        // consumers must parse the textual form produced by the event class
+        //ObjectMessage message = session.createObjectMessage(event);
+        TextMessage message = session.createTextMessage(event.toString());
+
+        // Tell the producer to send the message
+        // hash is only a correlation id for matching the two log lines below
+        long hash = message.hashCode();
+        log.info("BrokerClient.publishEvent(): Sending message: connection={}, username={}, destination={}, hash={}, payload={}", connectionString, properties.getBrokerUsername(), destinationName, hash, event);
+        producer.send(message);
+        log.info("BrokerClient.publishEvent(): Message sent: connection={}, username={}, destination={}, hash={}, payload={}", connectionString, properties.getBrokerUsername(), destinationName, hash, event);
+
+        // close connection
+        if (_closeConn) {
+            closeConnection();
+        }
+    }
+
+ // ------------------------------------------------------------------------
+
+    /**
+     * Registers a MessageListener for asynchronous delivery from the given topic.
+     * Opens a connection when none is cached; the connection is intentionally left
+     * open for the lifetime of the subscription.
+     *
+     * @param connectionString broker URL used when no session is currently open
+     * @param destinationName  topic name (queue variant is commented out by design)
+     * @param listener         callback invoked for each received message
+     * @throws JMSException on connection or consumer creation failure
+     */
+    public void subscribe(String connectionString, String destinationName, MessageListener listener) throws JMSException {
+        // Create or open connection
+        checkProperties();
+        if (session == null) {
+            openConnection(connectionString);
+        }
+
+        // Create the destination (Topic or Queue)
+        log.info("BrokerClient: Subscribing to destination: {}...", destinationName);
+        //Destination destination = session.createQueue( destinationName );
+        Destination destination = session.createTopic(destinationName);
+
+        // Create a MessageConsumer from the Session to the Topic or Queue
+        MessageConsumer consumer = session.createConsumer(destination);
+        consumer.setMessageListener(listener);
+        listeners.put(listener, consumer);
+    }
+
+    /**
+     * Closes the consumer of a previously subscribed listener.
+     * FIX: the entry is also removed from the 'listeners' map — the original left the
+     * closed consumer in the map forever (memory leak, and a stale entry on
+     * re-subscribe). A cast is added because 'listeners' is declared as a raw HashMap.
+     */
+    public void unsubscribe(MessageListener listener) throws JMSException {
+        MessageConsumer consumer = (MessageConsumer) listeners.remove(listener);
+        if (consumer != null) {
+            consumer.close();
+        }
+    }
+
+ // ------------------------------------------------------------------------
+
+    /**
+     * Blocking receive loop: synchronously consumes messages from the given topic and
+     * hands each one to 'listener'. This method never returns normally (the while(true)
+     * loop only exits via an exception, e.g. when the connection is closed); the finally
+     * block then closes the consumer, and the connection too if this call opened it and
+     * 'preserveConnection' is off.
+     *
+     * @param connectionString broker URL used when no session is currently open
+     * @param destinationName  topic name to consume from
+     * @param listener         invoked on the calling thread for every message
+     * @throws JMSException on connection, receive, or close failure
+     */
+    public void receiveEvents(String connectionString, String destinationName, MessageListener listener) throws JMSException {
+        checkProperties();
+        MessageConsumer consumer = null;
+        boolean _closeConn = false;
+        try {
+            // Create or open connection
+            if (session==null) {
+                openConnection(connectionString);
+                _closeConn = ! properties.isPreserveConnection();
+            }
+
+            // Create the destination (Topic or Queue)
+            log.info("BrokerClient: Subscribing to destination: {}...", destinationName);
+            //Destination destination = session.createQueue( destinationName );
+            Destination destination = session.createTopic(destinationName);
+
+            // Create a MessageConsumer from the Session to the Topic or Queue
+            consumer = session.createConsumer(destination);
+
+            // Wait for messages
+            log.info("BrokerClient: Waiting for messages...");
+            while (true) {
+                Message message = consumer.receive();
+                listener.onMessage(message);
+            }
+
+        } finally {
+            // Clean up
+            log.info("BrokerClient: Closing connection...");
+            if (consumer != null) consumer.close();
+            if (_closeConn) {
+                closeConnection();
+            }
+        }
+    }
+
+ // ------------------------------------------------------------------------
+
+    /**
+     * Creates a connection factory matching the configured broker URL scheme:
+     * an SSL factory (with the configured trust/key stores) for "ssl*" URLs,
+     * a plain factory otherwise.
+     *
+     * @return a configured, not-yet-connected factory
+     */
+    public ActiveMQConnectionFactory createConnectionFactory() {
+        // Create connection factory based on Broker URL scheme
+        checkProperties();
+        final ActiveMQConnectionFactory connectionFactory;
+        String brokerUrl = properties.getBrokerUrl();
+        if (brokerUrl.startsWith("ssl")) {
+            log.info("BrokerClient.createConnectionFactory(): Creating new SSL connection factory instance: url={}", brokerUrl);
+            final ActiveMQSslConnectionFactory sslConnectionFactory = new ActiveMQSslConnectionFactory(brokerUrl);
+            try {
+                sslConnectionFactory.setTrustStore(properties.getTruststoreFile());
+                sslConnectionFactory.setTrustStoreType(properties.getTruststoreType());
+                sslConnectionFactory.setTrustStorePassword(properties.getTruststorePassword());
+                sslConnectionFactory.setKeyStore(properties.getKeystoreFile());
+                sslConnectionFactory.setKeyStoreType(properties.getKeystoreType());
+                sslConnectionFactory.setKeyStorePassword(properties.getKeystorePassword());
+                //sslConnectionFactory.setKeyStoreKeyPassword( properties........ );
+
+                connectionFactory = sslConnectionFactory;
+            } catch (final Exception theException) {
+                // NOTE(review): wrapping in java.lang.Error is unusual — a RuntimeException
+                // would be the conventional choice; left as-is since callers may rely on it
+                throw new Error(theException);
+            }
+        } else {
+            log.info("BrokerClient.createConnectionFactory(): Creating new non-SSL connection factory instance: url={}", brokerUrl);
+            connectionFactory = new ActiveMQConnectionFactory(brokerUrl);
+        }
+
+        // Other connection factory settings
+        // NOTE(review): trustAllPackages(true) allows arbitrary classes in ObjectMessage
+        // deserialization — acceptable only on a trusted broker; verify before production
+        //connectionFactory.setSendTimeout(....5000L);
+        //connectionFactory.setTrustedPackages(Arrays.asList("eu.melodic.event"));
+        connectionFactory.setTrustAllPackages(true);
+        connectionFactory.setWatchTopicAdvisories(true);
+
+        return connectionFactory;
+    }
+
+ // ------------------------------------------------------------------------
+
+    /** Opens a connection to the configured broker URL with configured credentials. */
+    public synchronized void openConnection() throws JMSException {
+        checkProperties();
+        openConnection(properties.getBrokerUrl(), null, null);
+    }
+
+    /** Opens a connection to the given broker URL with credentials from configuration. */
+    public synchronized void openConnection(String connectionString) throws JMSException {
+        openConnection(connectionString, null, null);
+    }
+
+    /**
+     * Opens a connection with explicit credentials.
+     * FIX: checkProperties() is called before reading properties.isPreserveConnection();
+     * the original dereferenced 'properties' directly and threw NullPointerException when
+     * this overload was the first call on a default-constructed client.
+     */
+    public synchronized void openConnection(String connectionString, String username, String password) throws JMSException {
+        checkProperties();
+        openConnection(connectionString, username, password, properties.isPreserveConnection());
+    }
+
+    /**
+     * Opens a JMS connection and an AUTO_ACKNOWLEDGE, non-transacted session, caching both
+     * in this client. Blank username falls back to the configured credentials.
+     * FIX: passwords are no longer written to the logs — the original logged them in
+     * plain text at DEBUG level on three lines.
+     *
+     * NOTE(review): the 'preserveConnection' parameter is accepted but not used here
+     * (preservation is decided by the callers that open connections) — confirm intent.
+     */
+    public synchronized void openConnection(String connectionString, String username, String password, boolean preserveConnection) throws JMSException {
+        checkProperties();
+        if (connectionString == null) connectionString = properties.getBrokerUrl();
+        log.debug("BrokerClient: Credentials provided as arguments: username={}", username);
+        if (StringUtils.isBlank(username)) {
+            username = properties.getBrokerUsername();
+            password = properties.getBrokerPassword();
+            log.debug("BrokerClient: Credentials read from properties: username={}", username);
+        }
+
+        // Create connection factory
+        ActiveMQConnectionFactory connectionFactory = createConnectionFactory();
+        connectionFactory.setBrokerURL(connectionString);
+        if (StringUtils.isNotBlank(username) && password != null) {
+            connectionFactory.setUserName(username);
+            connectionFactory.setPassword(password);
+        }
+
+        // Create a Connection
+        log.info("BrokerClient: Connecting to broker: {}...", connectionString);
+        Connection connection = connectionFactory.createConnection();
+        connection.start();
+
+        // Create a Session
+        log.info("BrokerClient: Opening session...");
+        Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
+
+        // publish to fields only after both succeeded
+        this.connection = connection;
+        this.session = session;
+    }
+
+ public synchronized void closeConnection() throws JMSException {
+ // Clean up
+ session.close();
+ connection.close();
+ session = null;
+ connection = null;
+ }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java
new file mode 100644
index 0000000000000000000000000000000000000000..0448fe2cd58bb6244b5a525c9f566484cb2dbdef
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerClientApp.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient;
+
+import eu.melodic.event.brokerclient.event.EventGenerator;
+import eu.melodic.event.brokerclient.event.EventMap;
+import javax.jms.*;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.HashMap;
+import java.util.Map;
+
+@Slf4j
+public class BrokerClientApp {
+
+ public static void main(String args[]) throws java.io.IOException, JMSException {
+ if (args.length==0) {
+ usage();
+ return;
+ }
+
+ int aa=0;
+ String command = args[aa++];
+
+ String username = args.length>aa && args[aa].startsWith("-U") ? args[aa++].substring(2) : null;
+ String password = username!=null && args.length>aa && args[aa].startsWith("-P") ? args[aa++].substring(2) : null;
+ if (StringUtils.isNotBlank(username) && password == null) {
+ password = new String(System.console().readPassword("Enter broker password: "));
+ }
+ String broker_properties_configuration_file_location = args.length>aa && args[aa].startsWith("-C") ? args[aa++].substring(2) : null;
+ if (broker_properties_configuration_file_location == null){
+ broker_properties_configuration_file_location = new String(System.console().readLine());
+ }
+
+ // list destinations
+ if ("list".equalsIgnoreCase(command)) {
+ String url = args[aa++];
+ log.info("BrokerClientApp: Listing destinations:");
+ BrokerClient client = BrokerClient.newClient(username, password,broker_properties_configuration_file_location);
+ client.getDestinationNames(url).stream().forEach(d -> log.info(" {}", d));
+ } else
+ // send an event
+ if ("publish".equalsIgnoreCase(command)) {
+ String url = args[aa++];
+ String topic = args[aa++];
+ String value = args[aa++];
+ String level = args[aa++];
+ EventMap event = new EventMap(Double.parseDouble(value), Integer.parseInt(level), System.currentTimeMillis());
+ log.info("BrokerClientApp: Publishing event: {}", event);
+ BrokerClient client = BrokerClient.newClient(username, password,broker_properties_configuration_file_location);
+ client.publishEvent(url, topic, event);
+ } else
+ //publish an event with custom information
+ if ("custom_publish".equalsIgnoreCase(command)) {
+ Map data = new HashMap<>();
+ String url = args[aa++];
+ String topic = args[aa++];
+ while (aa+1 [-P]] ");
+ log.info("BrokerClientApp: client publish [-U [-P]] ");
+ log.info("BrokerClientApp: client receive [-U [-P]] ");
+ log.info("BrokerClientApp: client subscribe [-U [-P]] ");
+ log.info("BrokerClientApp: client generator [-U [-P]] ");
+ }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java
new file mode 100644
index 0000000000000000000000000000000000000000..3bc697749777b4c5c1854fed5461947faa8c2ca2
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerPublisher.java
@@ -0,0 +1,61 @@
+package eu.melodic.event.brokerclient;
+
+import lombok.extern.slf4j.Slf4j;
+
+import javax.jms.JMSException;
+import java.io.IOException;
+import java.util.Map;
+
+@Slf4j
+public class BrokerPublisher {
+
+    private String topic;
+    private String url;
+    private String username;
+    private String password;
+    private String broker_configuration_file_location;
+    private BrokerClient client = null;
+
+    /**
+     * Convenience publisher bound to one topic/broker; the underlying BrokerClient is
+     * created lazily on first publish and reused afterwards.
+     */
+    public BrokerPublisher(String topic, String url, String username, String password, String broker_configuration_file_location) {
+        this.topic = topic;
+        this.url = url;
+        this.username = username;
+        this.password = password;
+        this.broker_configuration_file_location = broker_configuration_file_location;
+    }
+
+    // Lazily builds the shared client (FIX: this was copy-pasted into all three publish methods).
+    private BrokerClient getClient() throws IOException, JMSException {
+        if (client == null) {
+            client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+        }
+        return client;
+    }
+
+    /** Publishes a map-based event; failures are logged, not rethrown (best-effort). */
+    public void publish(Map event_map) {
+        try {
+            log.info("BrokerClientApp: Publishing to topic: {}", topic);
+            getClient().publishEvent(url, topic, event_map);
+        } catch (IOException | JMSException i) {
+            // FIX: log via slf4j instead of printStackTrace()
+            log.error("BrokerPublisher: publish to topic " + topic + " failed", i);
+        }
+    }
+
+    /** Publishes a raw string payload without demanding a persistent connection. */
+    public void publish(String s) {
+        publish(s, false);
+    }
+
+    /**
+     * Publishes a raw string payload.
+     *
+     * @param persistent_connection_demanded when true, the connection is kept open after sending
+     */
+    public void publish(String s, boolean persistent_connection_demanded) {
+        try {
+            log.info("BrokerClientApp: Publishing to topic: {}", topic);
+            getClient()._publishEvent(url, topic, s, persistent_connection_demanded);
+        } catch (IOException | JMSException i) {
+            // FIX: log via slf4j instead of printStackTrace()
+            log.error("BrokerPublisher: publish to topic " + topic + " failed", i);
+        }
+    }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java
new file mode 100644
index 0000000000000000000000000000000000000000..19351cb7197445521a1b513cf1362e56d59eea57
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/BrokerSubscriber.java
@@ -0,0 +1,46 @@
+package eu.melodic.event.brokerclient;
+
+import lombok.extern.slf4j.Slf4j;
+
+import javax.jms.*;
+import java.io.IOException;
+import java.util.function.BiFunction;
+
+
+@Slf4j
+public class BrokerSubscriber {
+
+ private String topic;
+ private String url;
+ private String username;
+ private String password;
+ private String broker_configuration_file_location;
+ private BrokerClient client = null;
+
+ public BrokerSubscriber(String topic, String url, String username, String password, String broker_configuration_file_location){
+ this.topic = topic;
+ this.url = url;
+ this.username = username;
+ this.password = password;
+ this.broker_configuration_file_location = broker_configuration_file_location;
+ }
+
+
+ public void subscribe(BiFunction function) {
+ try {
+ log.info("BrokerClientApp: Subscribing to topic: {}", topic);
+ BrokerClient client = BrokerClient.newClient(username, password, broker_configuration_file_location);
+ client.receiveEvents(url, topic, message -> {
+ try {
+ function.apply(topic,((TextMessage) message).getText());
+ } catch (JMSException j) {
+ log.info("Shutting down subscriber...");
+ j.printStackTrace();
+ }
+ });
+ }catch (IOException | JMSException i){
+ i.printStackTrace();
+ }
+ }
+
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java
new file mode 100644
index 0000000000000000000000000000000000000000..4aab576df18732103fd46935a14be3078c7eeb25
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventGenerator.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.event;
+
+import eu.melodic.event.brokerclient.BrokerClient;
+import lombok.Data;
+import lombok.extern.slf4j.Slf4j;
+
+@Data
+@Slf4j
+public class EventGenerator implements Runnable {
+    private BrokerClient client;          // client used to publish generated events
+    private String brokerUrl;
+    private String destinationName;
+    private long interval;                // ms between events
+    private long howmany = -1;            // number of events to send; -1 = unbounded
+    private double lowerValue;            // inclusive lower bound of random metric value
+    private double upperValue;            // upper bound of random metric value
+    private int level;
+
+    private transient boolean keepRunning;
+
+    /** Starts the generator on a daemon thread; no-op if already running. */
+    public void start() {
+        if (keepRunning) return;
+        Thread runner = new Thread(this);
+        runner.setDaemon(true);
+        runner.start();
+    }
+
+    /** Requests the generator loop to stop after the current iteration. */
+    public void stop() {
+        keepRunning = false;
+    }
+
+    /**
+     * Generator loop: publishes a random EventMap in [lowerValue, upperValue) every
+     * 'interval' ms until stop() is called or 'howmany' events have been sent.
+     * Publish errors are logged and the loop continues (best-effort).
+     */
+    public void run() {
+        log.info("EventGenerator.run(): Start sending events: event-generator: {}", this);
+
+        keepRunning = true;
+        double valueRangeWidth = upperValue - lowerValue;
+        long countSent = 0;
+        while (keepRunning) {
+            try {
+                double newValue = Math.random() * valueRangeWidth + lowerValue;
+                EventMap event = new EventMap(newValue, level, System.currentTimeMillis());
+                log.info("EventGenerator.run(): Sending event #{}: {}", countSent + 1, event);
+                client.publishEvent(brokerUrl, destinationName, event);
+                countSent++;
+                if (countSent == howmany) keepRunning = false;
+                log.info("EventGenerator.run(): Event sent #{}: {}", countSent, event);
+            } catch (Exception ex) {
+                // FIX: pass the exception as the throwable argument so the stack trace is
+                // logged; the original used "{}" which only logged ex.toString()
+                log.warn("EventGenerator.run(): WHILE-EXCEPTION: ", ex);
+            }
+            // sleep for 'interval' ms
+            try {
+                if (keepRunning) {
+                    Thread.sleep(interval);
+                }
+            } catch (InterruptedException ex) {
+                // interrupt is deliberately swallowed: the loop only exits via stop()/howmany
+                log.warn("EventGenerator.run(): Sleep interrupted");
+            }
+        }
+
+        log.info("EventGenerator.run(): Stop sending events: event-generator: {}", this);
+    }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java
new file mode 100644
index 0000000000000000000000000000000000000000..a180508ead865c0767623296e7df3a231c6004d0
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/event/EventMap.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.event;
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import lombok.Getter;
+import lombok.extern.slf4j.Slf4j;
+
+@Getter
+@Slf4j
+// NOTE(review): extends a raw HashMap — generic parameters (e.g. <String, Object>) were
+// likely stripped from this patch; confirm against the original source before changing.
+public class EventMap extends HashMap implements Serializable {
+    /** Empty event. */
+    public EventMap() {
+        super();
+    }
+
+    /** Event populated from an arbitrary map of properties. */
+    public EventMap(Map map) {
+        super(map);
+    }
+
+    /** Standard metric event: value, prediction level, and epoch-millis timestamp. */
+    public EventMap(double metricValue, int level, long timestamp) {
+        put("metricValue", metricValue);
+        put("level", level);
+        put("timestamp", timestamp);
+    }
+
+    /** Property names of the standard metric event, in declaration order. */
+    public static String[] getPropertyNames() {
+        return new String[]{"metricValue", "level", "timestamp"};
+    }
+
+    /** Property types matching {@link #getPropertyNames()} position by position. */
+    public static Class[] getPropertyClasses() {
+        return new Class[]{Double.class, Integer.class, Long.class};
+    }
+}
\ No newline at end of file
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java
new file mode 100644
index 0000000000000000000000000000000000000000..25974e81992ef3bd8a63aa50285a5d5cb1fe4a0d
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/properties/BrokerClientProperties.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
+ *
+ * This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
+ * Esper library is used, in which case it is subject to the terms of General Public License v2.0.
+ * If a copy of the MPL was not distributed with this file, you can obtain one at
+ * https://www.mozilla.org/en-US/MPL/2.0/
+ */
+
+package eu.melodic.event.brokerclient.properties;
+
+import lombok.Data;
+import lombok.ToString;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
+import org.springframework.context.annotation.PropertySource;
+
+@Data
+@ToString(exclude = {"truststorePassword", "keystorePassword", "brokerPassword"})
+@Configuration
+@ConfigurationProperties(prefix = "brokerclient")
+@PropertySource("file:${MELODIC_CONFIG_DIR}/eu.melodic.event.brokerclient.properties")
+@Slf4j
+public class BrokerClientProperties {
+    @Value("${broker-name:broker}")
+    private String brokerName;
+    @Value("${broker-url:ssl://localhost:61616}")
+    private String brokerUrl;
+    @Value("${broker-url-properties:}")
+    private String brokerUrlProperties;
+    @Value("${ssl.client-auth.required:false}")
+    private boolean clientAuthRequired;
+    @Value("${connector-port:-1}")
+    private int connectorPort;
+    // NOTE(review): @Value default is false but both constructors default to true — confirm
+    // which default is intended and align them.
+    @Value("${preserve-connection:false}")
+    private boolean preserveConnection;
+
+    @Value("${ssl.truststore.file:}")
+    private String truststoreFile;
+    @Value("${ssl.truststore.type:}")
+    private String truststoreType;
+    @Value("${ssl.truststore.password:}")
+    private String truststorePassword;
+    @Value("${ssl.keystore.file:}")
+    private String keystoreFile;
+    @Value("${ssl.keystore.type:}")
+    private String keystoreType;
+    @Value("${ssl.keystore.password:}")
+    private String keystorePassword;
+
+    @Value("${broker-username:}")
+    private String brokerUsername;
+    @Value("${broker-password:}")
+    private String brokerPassword;
+
+    /**
+     * Built-in defaults, used when no properties file is supplied.
+     * FIX: the default broker URL contained a stray trailing '}' ("ssl://localhost:61616}"),
+     * which is not a valid ActiveMQ URL.
+     */
+    public BrokerClientProperties() {
+        brokerName = "broker";
+        brokerUrl = "ssl://localhost:61616";
+        brokerUrlProperties = "";
+        connectorPort = -1;
+        preserveConnection = true;
+
+        truststoreFile = "";
+        truststoreType = "";
+        truststorePassword = "";
+        keystoreFile = "";
+        keystoreType = "";
+        keystorePassword = "";
+        clientAuthRequired = false;
+
+        brokerUsername = "";
+        brokerPassword = "";
+    }
+
+    /**
+     * Builds configuration from raw Properties using "brokerclient.*" keys.
+     * FIX: same stray '}' removed from the broker-url fallback default.
+     */
+    public BrokerClientProperties(java.util.Properties p) {
+        brokerName = p.getProperty("brokerclient.broker-name", "broker");
+        brokerUrl = p.getProperty("brokerclient.broker-url", "ssl://localhost:61616");
+        brokerUrlProperties = p.getProperty("brokerclient.broker-url-properties", "");
+        connectorPort = Integer.parseInt(p.getProperty("brokerclient.connector-port", "-1"));
+        preserveConnection = Boolean.parseBoolean(p.getProperty("brokerclient.preserve-connection", "true"));
+
+        truststoreFile = p.getProperty("brokerclient.ssl.truststore.file", "");
+        truststoreType = p.getProperty("brokerclient.ssl.truststore.type", "");
+        truststorePassword = p.getProperty("brokerclient.ssl.truststore.password", "");
+        keystoreFile = p.getProperty("brokerclient.ssl.keystore.file", "");
+        keystoreType = p.getProperty("brokerclient.ssl.keystore.type", "");
+        keystorePassword = p.getProperty("brokerclient.ssl.keystore.password", "");
+        clientAuthRequired = Boolean.parseBoolean(p.getProperty("brokerclient.ssl.client-auth.required", "false"));
+
+        brokerUsername = p.getProperty("brokerclient.broker-username", "");
+        brokerPassword = p.getProperty("brokerclient.broker-password", "");
+
+        // substitute the client-auth flag into the URL options template
+        brokerUrlProperties = brokerUrlProperties.replace("${brokerclient.ssl.client-auth.required}", Boolean.toString(clientAuthRequired));
+    }
+}
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java
new file mode 100644
index 0000000000000000000000000000000000000000..6859637ee80ae769a6e9fd508b4a3256beea40ec
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/EventFields.java
@@ -0,0 +1,72 @@
+package eu.melodic.event.brokerclient.templates;
+
/**
 * Names of the JSON fields used in the events exchanged between the Morphemic
 * forecasting components (EMS, translator, forecasting methods and the
 * Prediction Orchestrator) over the message broker.
 *
 * <p>Each nested class groups the field names of one event type. All members
 * are string constants; none of the classes is meant to be instantiated.
 */
public final class EventFields {

    private EventFields() {
        // constants holder - not instantiable
    }

    /**
     * This event is used to send realtime metric values.
     */
    public static final class MetricEventFields {
        private MetricEventFields() {}

        public static final String timestamp = "timestamp";
        public static final String metric_value = "metricValue";
        public static final String refers_to = "refersTo";
        public static final String cloud = "cloud";
        public static final String provider = "provider";
    }

    /**
     * This event is used to send predicted metric values.
     */
    public static final class PredictionMetricEventFields {
        private PredictionMetricEventFields() {}

        public static final String timestamp = "timestamp";
        public static final String level = "level";
        public static final String metric_value = "metricValue";
        public static final String refers_to = "refersTo";
        public static final String cloud = "cloud";
        public static final String provider = "provider";
        public static final String probability = "probability";
        public static final String confidence_interval = "confidence_interval";
        public static final String prediction_time = "predictionTime";
    }

    /**
     * This event is used to send a list of metric values which should be
     * predicted by forecasting methods. The fields below reflect the fields of
     * each JSON object inside the event's JSON array.
     */
    public static final class TranslatorToForecastingMethodsFieldsPerMetric {
        private TranslatorToForecastingMethodsFieldsPerMetric() {}

        public static final String metric = "metric";
        public static final String level = "level";
        public static final String publish_rate = "publish_rate";
    }

    /**
     * This event is used to indicate that a (re)training of the forecasting
     * method has been performed.
     */
    public static final class ForecastingMethodsToPredictionOrchestratorTrainingEventFields {
        private ForecastingMethodsToPredictionOrchestratorTrainingEventFields() {}

        public static final String metrics = "metrics";
        public static final String forecasting_method = "forecasting_method";
        public static final String timestamp = "timestamp";
    }

    /**
     * This event is used to send intermediate predictions generated by
     * individual forecasting methods.
     */
    public static final class ForecastingMethodsToPredictionOrchestratorIntermediatePredictionsFields {
        private ForecastingMethodsToPredictionOrchestratorIntermediatePredictionsFields() {}

        public static final String timestamp = "timestamp";
        public static final String metric_value = "metricValue";
        public static final String level = "level";
        public static final String refers_to = "refersTo";
        public static final String cloud = "cloud";
        public static final String provider = "provider";
        public static final String probability = "probability";
        public static final String confidence_interval = "confidence_interval";
        public static final String prediction_time = "predictionTime";
    }

    /**
     * This event is used to initiate forecasting of one or more monitoring metrics.
     */
    public static final class PredictionOrchestratorToForecastingMethodsStartForecastingEventFields {
        private PredictionOrchestratorToForecastingMethodsStartForecastingEventFields() {}

        public static final String metrics = "metrics";
        public static final String timestamp = "timestamp";
    }

    /**
     * This event is used to stop forecasting of one or more monitoring metrics.
     */
    public static final class PredictionOrchestratorToForecastingMethodsStopForecastingEventFields {
        private PredictionOrchestratorToForecastingMethodsStopForecastingEventFields() {}

        public static final String metrics = "metrics";
        public static final String timestamp = "timestamp";
    }
}
diff --git a/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java
new file mode 100644
index 0000000000000000000000000000000000000000..94a3b6644cc3273a8b232f0d6f45575ebba4bb83
--- /dev/null
+++ b/amq-message-java-library/src/main/java/eu/melodic/event/brokerclient/templates/TopicNames.java
@@ -0,0 +1,91 @@
+package eu.melodic.event.brokerclient.templates;
+
/**
 * Helpers and constants producing the broker topic names used by the
 * Morphemic forecasting components.
 *
 * <p>All members are static; none of the classes is meant to be instantiated.
 */
public final class TopicNames {

    private TopicNames() {
        // constants holder - not instantiable
    }

    /**
     * The realtime values which are published for a particular metric, by the EMS.
     *
     * @param metric_name The metric for which the monitoring values are desired
     * @return A String value containing the topic which is used to publish the realtime values of a metric
     */
    public static String realtime_metric_values_topic(String metric_name) {
        return metric_name;
    }

    /**
     * The final metric predictions which are produced by the Prediction Orchestrator.
     *
     * @param predicted_metric_name The name of the metric for which predictions are published
     * @return A String value containing the topic with the name of the metric
     */
    public static String final_metric_predictions_topic(String predicted_metric_name) {
        return "prediction." + predicted_metric_name;
    }

    /**
     * The topic which is used by the translator to inform forecasting methods
     * that a metric should be predicted.
     */
    // CONSISTENCY FIX: declared final, like the other topic-name constants in this file.
    public static final String translator_to_forecasting_methods_topic = "metrics_to_predict";

    /**
     * The topic which is used by forecasting methods to inform the prediction
     * orchestrator that a (re)training of a prediction method has been performed.
     */
    public static final String forecasting_methods_to_prediction_orchestrator_training_topic = "training_models";

    /**
     * The topics which should be used to transmit intermediate forecasts for a
     * particular metric by the individual prediction methods.
     */
    public static final class ForecastingMethodsToPredictionOrchestratorPredictionsTopic {
        private ForecastingMethodsToPredictionOrchestratorPredictionsTopic() {}

        // Common prefix of all intermediate-prediction topics.
        private static final String PREFIX = "intermediate_prediction.";

        public static String nbeats(String metric_name) {
            return PREFIX + "nbeats." + metric_name;
        }
        public static String es_hybrid(String metric_name) {
            return PREFIX + "eshybrid." + metric_name;
        }
        public static String arima(String metric_name) {
            return PREFIX + "arima." + metric_name;
        }
        public static String tsetlin_machines(String metric_name) {
            return PREFIX + "tsetlinmachines." + metric_name;
        }
        public static String exponential_smoothing(String metric_name) {
            return PREFIX + "exponentialsmoothing." + metric_name;
        }
        public static String lstm(String metric_name) {
            return PREFIX + "lstm." + metric_name;
        }
        public static String gluon_machines(String metric_name) {
            return PREFIX + "gluonmachines." + metric_name;
        }
        public static String prophet(String metric_name) {
            return PREFIX + "prophet." + metric_name;
        }
    }

    /**
     * The topics which are used to notify a forecasting method to start
     * forecasting one or more metrics.
     */
    public static final class PredictionOrchestratorToForecastingMethodsStartForecastingTopic {
        private PredictionOrchestratorToForecastingMethodsStartForecastingTopic() {}

        public static final String nbeats = "start_forecasting.nbeats";
        public static final String es_hybrid = "start_forecasting.eshybrid";
        public static final String arima = "start_forecasting.arima";
        public static final String tsetlin_machines = "start_forecasting.tsetlinmachines";
        public static final String exponential_smoothing = "start_forecasting.exponentialsmoothing";
        public static final String lstm = "start_forecasting.lstm";
        public static final String gluon_machines = "start_forecasting.gluonmachines";
        public static final String prophet = "start_forecasting.prophet";
    }

    /**
     * The topics which are used to notify a forecasting method to stop
     * forecasting one or more metrics.
     */
    public static final class PredictionOrchestratorToForecastingMethodsStopForecastingTopic {
        private PredictionOrchestratorToForecastingMethodsStopForecastingTopic() {}

        public static final String nbeats = "stop_forecasting.nbeats";
        public static final String es_hybrid = "stop_forecasting.eshybrid";
        public static final String arima = "stop_forecasting.arima";
        public static final String tsetlin_machines = "stop_forecasting.tsetlinmachines";
        public static final String exponential_smoothing = "stop_forecasting.exponentialsmoothing";
        public static final String lstm = "stop_forecasting.lstm";
        public static final String gluon_machines = "stop_forecasting.gluonmachines";
        public static final String prophet = "stop_forecasting.prophet";
    }
}
diff --git a/amq-message-python-library/Event.py b/amq-message-python-library/Event.py
new file mode 100644
index 0000000000000000000000000000000000000000..52dbc842ef3c729386bed8f8e7cd4b92bc9420c6
--- /dev/null
+++ b/amq-message-python-library/Event.py
@@ -0,0 +1,420 @@
+
+
class Metric(enumerate):
    """Field names of the realtime (detected) metric event.

    Published by EMS on the topic named after the metric itself
    (e.g. ``MaxCPULoad``). The event is persisted in InfluxDB and the
    Prediction Orchestrator subscribes to it in order to evaluate the
    forecasting methods against the defined KPIs (e.g. MAPE).

    Example payload::

        {
            "metricValue": 12.34,
            "level": 1,
            "timestamp": 143532341251,
            "refersTo": "MySQL_12345",
            "cloud": "AWS-Dublin",
            "provider": "AWS"
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    TIMESTAMP = "timestamp"       # event creation time (epoch)
    METRIC_VALUE = "metricValue"  # measured metric value
    REFERS_TO = "refersTo"        # id of the application/component/host measured
    CLOUD = "cloud"               # cloud provider of the VM (with location)
    PROVIDER = "provider"         # cloud provider name
+
+
+
class PredictionMetric(enumerate):
    """Field names of the final predicted-metric event.

    Produced by the Prediction Orchestrator and published on the topic
    ``prediction.[metric_name]`` (e.g. ``prediction.MaxCPULoad``); it
    carries the final predicted value for a single metric.

    Example payload::

        {
            "metricValue": 12.34,
            "level": 1,
            "timestamp": 143532341251,
            "probability": 0.98,
            "confidence_interval": [8, 15],
            "predictionTime": 143532342,
            "refersTo": "MySQL_12345",
            "cloud": "AWS-Dublin",
            "provider": "AWS"
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic prefix this event type is published under.
    _match = "prediction."

    METRICVALUE = "metricValue"                  # predicted metric value
    LEVEL = "level"                              # level of the VM the prediction occurred at or refers to
    TIMESTAMP = "timestamp"                      # prediction creation date/time (epoch)
    PROBABILITY = "probability"                  # probability of the predicted value (range 0..1)
    CONFIDENCE_INTERVAL = "confidence_interval"  # probability-confidence interval of the prediction
    PREDICTION_TIME = "predictionTime"           # future time point (UNIX epoch) the prediction is valid for,
                                                 # relative to the time needed for reconfiguration
    REFERSTO = "refersTo"                        # id of the application/component/(VM) host predicted
    CLOUD = "cloud"                              # cloud provider of the VM (with location)
    PROVIDER = "provider"                        # cloud provider name
+
+
+
class MetricsToPredict(enumerate):
    """Field names of the translator's "metrics to predict" event.

    Produced by the translator on topic ``metrics_to_predict`` to:

    * inform the Dataset Maker which metrics to subscribe to, so it can
      aggregate the appropriate training dataset in the time-series DB;
    * instruct each forecasting method to predict the values of one or
      more monitoring metrics;
    * inform the Prediction Orchestrator which metrics will be forecast.

    Note: this event could be communicated through Mule.

    The event is a JSON array; each element looks like::

        {
            "metric": "MaxCPULoad",
            "level": 3,
            "publish_rate": 60000
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic this event type is published under.
    _match = "metrics_to_predict"

    METRIC = "metric"              # name of the metric to predict
    LEVEL = "level"                # level of the monitoring topology where the metric is produced/found
    PUBLISH_RATE = "publish_rate"  # expected rate of datapoints for the metric (according to CAMEL)
+
+
class TraningModels(enumerate):
    """Field names of the (re-)training notification event.

    NOTE(review): the class name is misspelled ("Traning" instead of
    "Training") but is kept unchanged because external code may already
    reference it.

    Produced by each forecasting method on topic ``training_models`` to
    inform the Prediction Orchestrator that the method has (re-)trained
    its model for one or more metrics.

    Example payload::

        {
            "metrics": ["MaxCPULoad", "MinCPULoad"],
            "forecasting_method": "ESHybrid",
            "timestamp": 143532341251
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic this event type is published under.
    _match = "training_models"

    METRICS = "metrics"                        # metrics whose model was successfully (re-)trained
    FORECASTING_METHOD = "forecasting_method"  # the method that re-trained its models
    TIMESTAMP = "timestamp"                    # date/time of the model(s) (re-)training
+
+
class IntermediatePrediction(enumerate):
    """Field names of the intermediate-prediction event.

    Produced by each forecasting method; the Prediction Orchestrator uses
    these events to determine the final prediction value for a metric.

    * Topic: ``intermediate_prediction.[forecasting_method].[metric_name]``
      (e.g. ``intermediate_prediction.ESHybrid.MaxCPULoad``)
    * Wildcard subscriptions are possible, e.g.
      ``intermediate_prediction.*.MaxCPULoad`` (all methods, one metric) or
      ``intermediate_prediction.ESHybrid.*`` (one method, all metrics).
    * Each forecasting method publishes a static (but configurable) number m
      of predicted values (under the same timestamp) for time points in the
      future, relative to the needed reconfiguration time. For example, with
      m=5 and reconfiguration time TR=10 minutes, at t0 a method publishes 5
      events with the same timestamp and prediction times t0+10, t0+20,
      t0+30, t0+40, t0+50.

    Example payload::

        {
            "metricValue": 12.34,
            "level": 3,
            "timestamp": 143532341251,
            "probability": 0.98,
            "confidence_interval": [8, 15],
            "predictionTime": 143532342,
            "refersTo": "MySQL_12345",
            "cloud": "AWS-Dublin",
            "provider": "AWS"
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic prefix this event type is published under.
    _match = "intermediate_prediction."

    METRICVALUE = "metricValue"                  # predicted metric value; multiple events cover different
                                                 # future time points (useful e.g. when a method is unreachable)
    LEVEL = "level"                              # level of the VM the prediction occurred at or refers to
    TIMESTAMP = "timestamp"                      # prediction creation date/time (epoch)
    PROBABILITY = "probability"                  # probability of the predicted value (range 0..1)
    CONFIDENCE_INTERVAL = "confidence_interval"  # probability-confidence interval of the prediction
    PREDICTION_TIME = "predictionTime"           # future time point (UNIX epoch) the prediction is valid for,
                                                 # relative to the time needed for reconfiguration
    REFERS_TO = "refersTo"                       # id of the application/component/(VM) host predicted
    CLOUD = "cloud"                              # cloud provider of the VM (with location)
    PROVIDER = "provider"                        # cloud provider name
+
+
+
class Prediction(enumerate):
    """Field names of the prediction event consumed by the SLO Violation Detector.

    Used by the Prediction Orchestrator to inform the Severity-based SLO
    Violation Detector about the (predicted) values of a metric, which may
    lead to an SLO violation detection.

    * Topic: ``prediction.[metric_name]`` (e.g. ``prediction.MaxCPULoad``)

    Example payload::

        {
            "metricValue": 12.34,
            "level": 1,
            "timestamp": 143532341251,
            "probability": 0.98,
            "confidence_interval": [8, 15],
            "predictionTime": 143532342,
            "refersTo": "MySQL_12345",
            "cloud": "AWS-Dublin",
            "provider": "AWS"
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic prefix this event type is published under.
    _match = "prediction."

    METRICVALUE = "metricValue"                  # predicted metric value
    LEVEL = "level"                              # level of the VM the prediction occurred at or refers to
    TIMESTAMP = "timestamp"                      # prediction creation date/time (epoch)
    PROBABILITY = "probability"                  # probability of the predicted value (range 0..1)
    CONFIDENCE_INTERVAL = "confidence_interval"  # probability-confidence interval of the prediction
    PREDICTIONTIME = "predictionTime"            # future time point (UNIX epoch) the prediction is valid for,
                                                 # relative to the time needed for reconfiguration
    REFERSTO = "refersTo"                        # id of the application/component/(VM) host predicted
    CLOUD = "cloud"                              # cloud provider of the VM (with location)
    PROVIDER = "provider"                        # cloud provider name
+
+
class StopForecasting(enumerate):
    """Field names of the stop-forecasting command event.

    Used by the Prediction Orchestrator to instruct a forecasting method to
    stop producing predicted values for a selection of metrics.

    * Topic: ``stop_forecasting.[forecasting_method]`` — each component that
      implements a specific forecasting method subscribes to its own topic
      (e.g. the ES-Hybrid component to ``stop_forecasting.eshybrid``).

    Example payload::

        {
            "metrics": ["MaxCPULoad", "MinCPULoad"],
            "timestamp": 143532341251
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic prefix this event type is published under.
    _match = "stop_forecasting."

    METRICS = "metrics"      # metrics a method should stop predicting (e.g. because of poor results)
    TIMESTAMP = "timestamp"  # date/time of the orchestrator's command
+
+
class StartForecasting(enumerate):
    """Field names of the start-forecasting command event.

    Used by the Prediction Orchestrator to instruct a forecasting method to
    start producing predicted values for a selection of metrics.

    * Topic: ``start_forecasting.[forecasting_method]`` — each component that
      implements a specific forecasting method subscribes to its own topic
      (e.g. the ES-Hybrid component to ``start_forecasting.eshybrid``).
    * Each forecasting method publishes a static (but configurable) number m
      of predicted values (under the same timestamp) for time points in the
      future, relative to the needed reconfiguration time. For example, with
      m=5 and reconfiguration time TR=10 minutes, at t0 a method publishes 5
      events with the same timestamp and prediction times t0+10, t0+20,
      t0+30, t0+40, t0+50.

    Example payload::

        {
            "metrics": ["MaxCPULoad", "MinCPULoad"],
            "timestamp": 143532341251,
            "epoch_start": 143532341252,
            "number_of_forward_predictions": 5,
            "prediction_horizon": 600
        }

    See: https://confluence.7bulls.eu/display/MOR/Forecasting+Mechanism+Sub-components+Communication
    """

    # Topic prefix this event type is published under.
    _match = "start_forecasting."

    METRICS = "metrics"          # metrics a method should start predicting
    TIMESTAMP = "timestamp"      # date/time of the orchestrator's command
    EPOCH_START = "epoch_start"  # start time after which all predictions are considered (i.e. t0)
    NUMBER_OF_FORWARD_PREDICTIONS = "number_of_forward_predictions"  # how many future time points to predict
    PREDICTION_HORIZON = "prediction_horizon"  # seconds the platform needs to implement a reconfiguration (TR)
\ No newline at end of file
diff --git a/amq-message-python-library/MorphemicConnection.py b/amq-message-python-library/MorphemicConnection.py
new file mode 100644
index 0000000000000000000000000000000000000000..59f8dd0fe81e4d42886b27a3224ad213a67d07ff
--- /dev/null
+++ b/amq-message-python-library/MorphemicConnection.py
@@ -0,0 +1,84 @@
+
+import stomp
+import logging
+import json
+
+from stomp.listener import PrintingListener
+
class Connection:
    """Convenience wrapper around a ``stomp.Connection`` for the Morphemic broker.

    Keeps a registry of requested subscriptions and applies them on the broker
    when :meth:`connect` is called, so subscriptions can be declared before the
    connection is actually established.
    """

    def __init__(self, username, password,
                 host='localhost',
                 port=61613,
                 debug=True):
        """Create the underlying STOMP connection (without connecting yet).

        :param username: broker login name
        :param password: broker password
        :param host: broker hostname (default ``localhost``)
        :param port: broker STOMP port (default 61613)
        :param debug: when True, attach a PrintingListener that logs frames
        """
        # BUGFIX: 'subscriptions' was previously a *class* attribute, so every
        # Connection instance shared (and mutated) the same list. It is now
        # per-instance state.
        self.subscriptions = []
        self.hosts = [(host, port)]
        self.username = username
        self.password = password
        self.conn = stomp.Connection(host_and_ports=self.hosts)

        if debug:
            print("Enabling debug")
            self.conn.set_listener('print', PrintingListener())

    def set_listener(self, id, listener):
        # Forward to the underlying stomp connection, if one exists.
        if self.conn:
            self.conn.set_listener(id, listener)

    def subscribe(self, destination, id, ack='auto'):
        """Record a subscription, deduplicated on (destination, id).

        The subscription is applied on the broker when :meth:`connect` runs.
        """
        ref = next((item for item in self.subscriptions
                    if item['destination'] == destination and item['id'] == id), None)

        if not ref:
            self.subscriptions.append(
                {
                    'id': id,
                    'destination': destination,
                    'ack': ack,
                }
            )

    def topic(self, destination, id, ack='auto'):
        """Record a subscription to the topic named *destination*."""
        self.subscribe("/topic/%s" % destination, id, ack)

    def queue(self, destination, id, ack='auto'):
        """Record a subscription to the queue named *destination*."""
        self.subscribe("/queue/%s" % destination, id, ack)

    def unsubscribe(self, id):
        """Unsubscribe *id* on the broker; no-op when not connected/running."""
        if not self.conn:
            return
        if not self.conn.running:
            return
        self.conn.unsubscribe(id)

    def connect(self, wait=True):
        """Connect to the broker and apply all recorded subscriptions."""
        if not self.conn:
            return
        self.conn.connect(self.username, self.password, wait=wait)
        for s in self.subscriptions:
            self.conn.subscribe(s['destination'], s['id'], s['ack'])

    def disconnect(self):
        """Drop all broker-side subscriptions, then close the connection."""
        for s in self.subscriptions:
            self.unsubscribe(s['id'])
        self.conn.disconnect()

    def send_to_topic(self, destination, body, headers=None, **kwargs):
        """JSON-encode *body* and publish it to ``/topic/<destination>``."""
        # BUGFIX: mutable default argument ({}) replaced with None to avoid a
        # shared-dict default being mutated across calls.
        if headers is None:
            headers = {}

        if not self.conn:
            logging.error("Connect first")
            return

        self.conn.send(destination="/topic/%s" % destination,
                       body=json.dumps(body),
                       content_type="application/json",
                       headers=headers, **kwargs)
diff --git a/amq-message-python-library/MorphemicListener.py b/amq-message-python-library/MorphemicListener.py
new file mode 100644
index 0000000000000000000000000000000000000000..1317bf7609593f9181a547d3587e315b17c0bb0e
--- /dev/null
+++ b/amq-message-python-library/MorphemicListener.py
@@ -0,0 +1,29 @@
+
+from stomp.listener import ConnectionListener
+import logging
+import json
+
class MorphemicListener(ConnectionListener):
    """STOMP listener that dispatches incoming frames to ``on_<topic>`` methods.

    Subclasses define handlers named after the topic (e.g. ``on_metrics_to_predict``)
    which receive the JSON-decoded message body.
    """

    def is_topic(self, headers, event):
        """Return True when the frame's destination matches *event*'s ``_match`` prefix."""
        if not hasattr(event, "_match"):
            return False
        return headers.get('destination').startswith(event._match)

    def _unknown_message(self, body):
        # Fallback for frames with no matching on_<topic> handler.
        logging.debug("Unknown message %s ", body)
        pass

    def on_message(self, headers, body):
        logging.debug("Headers %s", headers)
        logging.debug(" %s", body)

        # Derive the handler name from the destination, e.g.
        # '/topic/metrics_to_predict' -> 'on_metrics_to_predict'.
        handler_name = 'on_%s' % headers.get('destination').replace('/topic/', '')
        handler = getattr(self, handler_name, None)
        if handler is not None:
            handler(json.loads(body))
        else:
            self._unknown_message(body)
diff --git a/amq-message-python-library/Payloads.py b/amq-message-python-library/Payloads.py
new file mode 100644
index 0000000000000000000000000000000000000000..5de1adc844e289db185d74f7c7d76bca0045d686
--- /dev/null
+++ b/amq-message-python-library/Payloads.py
@@ -0,0 +1,10 @@
+
class MetricsToPredict:
    """Payload of the start_forecasting command event.

    Raises KeyError when a required field is missing from *body*.
    """

    # Required fields, in the order they are read from the message body.
    _FIELDS = ("metrics", "timestamp", "epoch_start",
               "number_of_forward_predictions", "prediction_horizon")

    def load(self, body):
        """Populate this object's attributes from the decoded JSON *body*."""
        for field in MetricsToPredict._FIELDS:
            setattr(self, field, body[field])
diff --git a/amq-message-python-library/__init__.py b/amq-message-python-library/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..45fe25b1772cc225e9ec364aa83620bd07f4e3a7
--- /dev/null
+++ b/amq-message-python-library/__init__.py
@@ -0,0 +1,5 @@
+
+from . import MorphemicConnection as morphemic
+from . import MorphemicListener as listener
+from . import Event as events
+from . import Payloads as payloads
\ No newline at end of file
diff --git a/amq-message-python-library/requirements.txt b/amq-message-python-library/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..12393625777ba6eee6deadd1983d648112aa02ef
--- /dev/null
+++ b/amq-message-python-library/requirements.txt
@@ -0,0 +1 @@
+stomp.py
diff --git a/morphemic-datasetmaker/morphemic/dataset/__init__.py b/morphemic-datasetmaker/morphemic/dataset/__init__.py
index db2c96f242c1efdde2a86de5515b6455ce02b0f7..a8a477dfc2634053e952c749b71507ff41a80825 100644
--- a/morphemic-datasetmaker/morphemic/dataset/__init__.py
+++ b/morphemic-datasetmaker/morphemic/dataset/__init__.py
@@ -10,7 +10,11 @@ class Row():
self.features = features
if "time" in self.features:
time_str = self.features["time"]
- _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ _obj = None
+ try:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ except:
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%SZ')
self.features["time"] = int(_obj.timestamp())
if 'application' in metricsname:
metricsname.remove('application')
@@ -18,6 +22,7 @@ class Row():
if not field_name in self.features:
self.features[field_name] = None
+
def getTime(self):
if "time" in self.features:
return self.features["time"]
diff --git a/morphemic-performance-model/ml_code/datasetlib b/morphemic-performance-model/ml_code/datasetlib
deleted file mode 160000
index c9c6d3c954b57f9dd3b5109514bd033da00c95db..0000000000000000000000000000000000000000
--- a/morphemic-performance-model/ml_code/datasetlib
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit c9c6d3c954b57f9dd3b5109514bd033da00c95db
diff --git a/morphemic-performance-model/ml_code/datasetlib/CHANGES.txt b/morphemic-performance-model/ml_code/datasetlib/CHANGES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO
new file mode 100644
index 0000000000000000000000000000000000000000..bdce7448c8739cdb539ea2f34b4001ce34b41a65
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/PKG-INFO
@@ -0,0 +1,79 @@
+Metadata-Version: 1.0
+Name: Dataset-Maker
+Version: 0.0.1
+Summary: Python package for creating a dataset using InfluxDB data points
+Home-page: http://git.dac.ds.unipi.gr/morphemic/datasetmaker
+Author: Jean-Didier Totow
+Author-email: totow@unipi.gr
+License: LICENSE.txt
+Description: 1. Generality
+
+ Dataset maker is morphemic python library for
+ building dataset from data points registered into InfluxDB.
+ Dataset maker receives the name of an application, the start time
+ and the tolerance interval. More details are provided below.
+
+ 2. InfluxDB format
+
+ Data points in InfluxDB should have the following format for being used
+ correctly by the dataset maker:
+
+ measurement : "application_name" #mandatory
+ timestamp : timestamp #optional
+ fields : dictionary containing the metrics exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+ tags : dictionary of metric-related information
+
+ The JSON describing the above information is the following:
+
+ Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+ If data points are presented as the above format, the dataset maker will output
+ a csv (application_name.csv) file with the following schema:
+ time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+ 3. Usage
+
+
+ Warning: make sure the above variables exist before importing the dataset maker library
+
+ from morphemic.dataset import DatasetMaker
+
+ data_maker = DatasetMaker(application, start, configs)
+ response = data_maker.make()
+
+ application, string containing the application name
+ start, when to start building the dataset
+ Ex.: '10m', build a dataset containing data points stored in the last 10 minutes
+ Ex.: '3h', three hours
+ Ex.: '4d', four days
+ leave empty or set to None if you wish all data points stored in your InfluxDB
+ configs is a dictionary containing parameters
+
+ {
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+ }
+
+ the response contains
+ {'status': True,'url': url, 'application': application_name, 'features': features}
+
+ or if an error occurred
+ {'status': False,'message': "reason of the error"}
+Platform: UNKNOWN
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..80dd9ca282142debe5336789f0ffa166c3f8e614
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/SOURCES.txt
@@ -0,0 +1,9 @@
+README.txt
+setup.py
+Dataset_Maker.egg-info/PKG-INFO
+Dataset_Maker.egg-info/SOURCES.txt
+Dataset_Maker.egg-info/dependency_links.txt
+Dataset_Maker.egg-info/requires.txt
+Dataset_Maker.egg-info/top_level.txt
+morphemic/__init__.py
+morphemic/dataset/__init__.py
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e20f7f050a243a245d049f39f136118021deedfd
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/requires.txt
@@ -0,0 +1,2 @@
+pandas
+influxdb
diff --git a/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..047ceb8359adfa72a917b51af6e230a7b5c3e218
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/Dataset_Maker.egg-info/top_level.txt
@@ -0,0 +1 @@
+morphemic
diff --git a/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt b/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2d70f41f07748e787bc3e8c0540add418f055ad4
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/LICENCE.txt
@@ -0,0 +1,22 @@
+Copyright (c) 2021 unipi.gr
+
+MIT License
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/README.txt b/morphemic-performance-model/ml_code/datasetlib/README.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3f08da5895cd5b32256914cb073b035e736bdc52
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/README.txt
@@ -0,0 +1,71 @@
+1. Generality
+
+Dataset maker is a morphemic Python library for
+building datasets from data points registered in InfluxDB.
+Dataset maker receives the name of an application, the start time
+and the tolerance interval. More details are provided below.
+
+2. InfluxDB format
+
+Data points in InfluxDB should have the following format for being used
+correctly by the dataset maker:
+
+measurement : "application_name" #mandatory
+timestamp : timestamp #optional
+fields : dictionary containing metrics exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+tags : dictionary of metric-related information
+
+The JSON describing the above information is the following:
+
+Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+If data points are presented in the above format, the dataset maker will output
+a csv (application_name.csv) file with the following schema:
+time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+3. Usage
+
+
+Warning: make sure the above variables exist before importing the dataset maker library
+
+from morphemic.dataset import DatasetMaker
+
+data_maker = DatasetMaker(application, start, configs)
+response = data_maker.make()
+
+application, string containing the application name
+start, when to start building the dataset
+Ex.: '10m', build a dataset containing data points stored during the last 10 minutes
+Ex.: '3h', three hours
+Ex.: '4d', four days
+leave empty or set to None if you wish to use all data points stored in your InfluxDB
+configs is a dictionary containing parameters
+
+{
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+}
+
+the response contains
+{'status': True,'url': url, 'application': application_name, 'features': features}
+
+or if an error occurred
+
+{'status': False,'message': "reason of the error"}
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO
new file mode 100644
index 0000000000000000000000000000000000000000..384bb8a358d1d67494d892b812245da9eec5376e
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/PKG-INFO
@@ -0,0 +1,79 @@
+Metadata-Version: 1.0
+Name: datasetmaker
+Version: 0.0.1
+Summary: Python package for creating a dataset using InfluxDB data points
+Home-page: http://git.dac.ds.unipi.gr/morphemic/datasetmaker
+Author: Jean-Didier Totow
+Author-email: totow@unipi.gr
+License: LICENSE.txt
+Description: 1. Generality
+
+ Dataset maker is morphemic python library for
+ building dataset from data points registered into InfluxDB.
+ Dataset maker receives the name of an application, the start time
+ and the tolerance interval. More details are provided below.
+
+ 2. InfluxDB format
+
+ Data points in InfluxDB should have the following format for being used
+ correctly by the dataset maker:
+
+ measurement : "application_name" #mandatory
+ timestamp : timestamp #optional
+ fields : dictionnary containing metric exposed by the given application
+ cpu_usage, memory_consumption, response_time, http_latency
+ tags : dictionnary of metrics related information
+
+ The JSON describing the above information is the following:
+
+ Ex.:
+ {"measurement": "application_name",
+ "timestamp": 155655476.453,
+ "fields": {
+ "cpu_usage": 40,
+ "memory_consumption": 67.9,
+ "response_time": 28,
+ "http_latency": 12
+ },
+ "tags": {
+ "core": 2 #cpu_usage of 40% is the usage of the cpu core number 2
+ }
+ }
+
+ If data points are presented as the above format, the dataset maker will output
+ a csv (application_name.csv) file with the following schema:
+ time, cpu_usage, memory_consumption, response_time, http_latency, core
+
+ 3. Usage
+
+
+ Warming : make sure the above variables exist before importing dataset make library
+
+ from morphemic.dataset import DatasetMaker
+
+ data_maker = DatasetMaker(application, start, configs)
+ response = data_maker.make()
+
+ application, string containing the application name
+ start, when to start building the dataset
+ Ex.: '10m' , build dataset containg data point stored the 10 last minute
+ Ex.: '3h', three hours
+ Ex.: '4d', four days
+ leave empty or set to None if you wish all data points stored in your InfluxDB
+ configs is dictionnary containg parameters
+
+ {
+ "hostname": hostname or IP of InfluxDB
+ "port": port of InfluxDB
+ "username": InfluxDB username
+ "password": password of the above user
+ "dbname": database name
+ "path_dataset": path where the dataset will be saved
+ }
+
+ the response contains
+ {'status': True,'url': url, 'application': application_name, 'features': features}
+
+ or if an error occured
+ {'status': False,'message': "reason of the error"}
+Platform: UNKNOWN
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt
new file mode 100644
index 0000000000000000000000000000000000000000..3d8cbf44478cca30eab5b651db9f18ab1b9348fb
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/SOURCES.txt
@@ -0,0 +1,9 @@
+README.txt
+setup.py
+datasetmaker.egg-info/PKG-INFO
+datasetmaker.egg-info/SOURCES.txt
+datasetmaker.egg-info/dependency_links.txt
+datasetmaker.egg-info/requires.txt
+datasetmaker.egg-info/top_level.txt
+morphemic/__init__.py
+morphemic/dataset/__init__.py
\ No newline at end of file
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/dependency_links.txt
@@ -0,0 +1 @@
+
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e20f7f050a243a245d049f39f136118021deedfd
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/requires.txt
@@ -0,0 +1,2 @@
+pandas
+influxdb
diff --git a/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt
new file mode 100644
index 0000000000000000000000000000000000000000..047ceb8359adfa72a917b51af6e230a7b5c3e218
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/datasetmaker.egg-info/top_level.txt
@@ -0,0 +1 @@
+morphemic
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/__init__.py b/morphemic-performance-model/ml_code/datasetlib/morphemic/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b4a41f6352cbab9846bba6c984fe6bbaef058d5e
Binary files /dev/null and b/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-36.pyc differ
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-37.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f27e8c483244bafadd7420e9bb4b52f19d60d5eb
Binary files /dev/null and b/morphemic-performance-model/ml_code/datasetlib/morphemic/__pycache__/__init__.cpython-37.pyc differ
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..db2c96f242c1efdde2a86de5515b6455ce02b0f7
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__init__.py
@@ -0,0 +1,154 @@
+import os, json, time
+from influxdb import InfluxDBClient
+import pandas as pd
+from datetime import datetime
+
+url_path_dataset = None
+
+class Row():
+ def __init__(self, features,metricsname):
+ self.features = features
+ if "time" in self.features:
+ time_str = self.features["time"]
+ _obj = datetime.strptime(time_str,'%Y-%m-%dT%H:%M:%S.%fZ')
+ self.features["time"] = int(_obj.timestamp())
+ if 'application' in metricsname:
+ metricsname.remove('application')
+ for field_name in metricsname:
+ if not field_name in self.features:
+ self.features[field_name] = None
+
+ def getTime(self):
+ if "time" in self.features:
+ return self.features["time"]
+ if "timestamp" in self.features:
+ return self.features["timestamp"]
+ return None
+
+ def makeCsvRow(self):
+ if "application" in self.features:
+ del self.features["application"]
+ result = ''
+ for key, _value in self.features.items():
+ result += "{0},".format(_value)
+ return result[:-1] + "\n"
+
+class Dataset():
+ def __init__(self):
+ self.rows = {}
+ self.size = 0
+ def addRow(self,row):
+ self.rows[row.getTime()] = row
+ self.size +=1
+ def reset(self):
+ self.rows = {}
+ self.size = 0
+ print("Dataset reset")
+ def getSize(self):
+ return self.size
+ def sortRows(self):
+ return sorted(list(self.rows.values()), key=lambda x: x.getTime(), reverse=True)
+ def getRows(self):
+ return list(self.rows.values())
+ def getRow(self,_time, tolerance):
+ for i in range(tolerance):
+ if int(_time + i) in self.rows:
+ return self.rows[int(_time+i)]
+ return None
+ def save(self,metricnames,application_name):
+ if "application" in metricnames:
+ metricnames.remove("application")
+ dataset_content = ''
+ for metric in metricnames:
+ dataset_content += "{0},".format(metric)
+ dataset_content = dataset_content[:-1] + "\n"
+ for row in list(self.rows.values()):
+ dataset_content += row.makeCsvRow()
+ _file = open(url_path_dataset + "{0}.csv".format(application_name),'w')
+ _file.write(dataset_content)
+ _file.close()
+ return url_path_dataset + "{0}.csv".format(application_name)
+
+class DatasetMaker():
+ def __init__(self, application, start, configs):
+ self.application = application
+ self.start_filter = start
+ self.influxdb = InfluxDBClient(host=configs['hostname'], port=configs['port'], username=configs['username'], password=configs['password'], database=configs['dbname'])
+ self.dataset = Dataset()
+ self.tolerance = 5
+ global url_path_dataset
+ url_path_dataset = configs['path_dataset']
+ if url_path_dataset[-1] != "/":
+ url_path_dataset += "/"
+
+ def getIndex(self, columns, name):
+ return columns.index(name)
+
+ def makeRow(self,columns, values):
+ row = {}
+ index = 0
+ for column in columns:
+ row[column] = values[index]
+ index +=1
+ return row
+
+ def prepareResultSet(self, result_set):
+ result = []
+ columns = result_set["series"][0]["columns"]
+ series_values = result_set["series"][0]["values"]
+ index = 0
+ for _values in series_values:
+ row = self.makeRow(columns,_values)
+ result.append(row)
+ return result
+
+ def make(self):
+ try:
+ self.influxdb.ping()
+ except Exception as e:
+ print("Could not establish connexion with InfluxDB, please verify connexion parameters")
+ print(e)
+ return {"message": "Could not establish connexion with InfluxDB, please verify connexion parameters"}
+ if self.getData() == None:
+ return {"message":"No data found"}
+
+ metricnames, _data = self.getData()
+ for _row in _data:
+ row = Row(_row,metricnames)
+ self.dataset.addRow(row)
+
+ print("Rows construction completed")
+ print("{0} rows found".format(self.dataset.getSize()))
+ #self.dataset.sortRows()
+ url = self.dataset.save(metricnames,self.application)
+ features = self.getFeatures(url)
+ if features == None:
+ return {'status': False, 'message': 'An error occured while building dataset'}
+ return {'status': True,'url': url, 'application': self.application, 'features': features}
+
+ def getFeatures(self, url):
+ try:
+ df = pd.read_csv(url)
+ return df.columns.to_list()
+ except Exception as e:
+ print("Cannot extract data feature list")
+ return None
+
+ def extractMeasurement(self, _json):
+ return _json["series"][0]["columns"]
+
+ def getData(self):
+ query = None
+ try:
+ if self.start_filter != None and self.start_filter != "":
+ query = "SELECT * FROM " + self.application +" WHERE time > now() - "+ self.start_filter
+ else:
+ query = "SELECT * FROM " + self.application
+ result_set = self.influxdb.query(query=query)
+ series = self.extractMeasurement(result_set.raw)
+ #self.influxdb.close() #closing connexion
+ return [series, self.prepareResultSet(result_set.raw)]
+ except Exception as e:
+ print("Could not collect query data points")
+ print(e)
+ return None
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9894da3b51ac629a70ae88ffcbbca133e8be4732
Binary files /dev/null and b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-36.pyc differ
diff --git a/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fd046025c6568187cf364fe9f4aabeebc1785b0b
Binary files /dev/null and b/morphemic-performance-model/ml_code/datasetlib/morphemic/dataset/__pycache__/__init__.cpython-37.pyc differ
diff --git a/morphemic-performance-model/ml_code/datasetlib/setup.py b/morphemic-performance-model/ml_code/datasetlib/setup.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e0ad6cc5b4a81ee0492517222212539681a1176
--- /dev/null
+++ b/morphemic-performance-model/ml_code/datasetlib/setup.py
@@ -0,0 +1,18 @@
+from setuptools import setup
+
+setup(
+ name = 'datasetmaker',
+ version = '0.0.1',
+ author = 'Jean-Didier Totow',
+ author_email = 'totow@unipi.gr',
+ packages = ['morphemic', 'morphemic.dataset'],
+ scripts = [],
+ url='http://git.dac.ds.unipi.gr/morphemic/datasetmaker',
+ license='LICENSE.txt',
+ description='Python package for creating a dataset using InfluxDB data points',
+ long_description=open('README.txt').read(),
+ install_requires=[
+ "pandas",
+ "influxdb",
+ ],
+)
diff --git a/morphemic-persistent-storage/database/data/meta/meta.db b/morphemic-persistent-storage/database/data/meta/meta.db
index cfa772e05ee150b92492c912a03600ba77b07d97..1ffc2dba105f54095bffb34565de252b010024af 100644
Binary files a/morphemic-persistent-storage/database/data/meta/meta.db and b/morphemic-persistent-storage/database/data/meta/meta.db differ
diff --git a/morphemic-persistent-storage/database/entrypoint.sh b/morphemic-persistent-storage/database/entrypoint.sh
index 259480d1a17df8e55c08ce0e826da54ec7258e44..1f8135ec544c7db4688c2579d8460972c7188f47 100755
--- a/morphemic-persistent-storage/database/entrypoint.sh
+++ b/morphemic-persistent-storage/database/entrypoint.sh
@@ -10,7 +10,7 @@ if [ "$1" = 'influxd' ]; then
fi
# Run API
-python inputapi/app.py &
+python -u inputapi/app.py &
# Run InfluxDB
exec "$@"
diff --git a/morphemic-persistent-storage/database/inputapi/src/activemqlistermanager.py b/morphemic-persistent-storage/database/inputapi/src/activemqlistermanager.py
index d9cffc516202d0031e7eed20df2bc2031c9e29df..542c15dbb462463a5e1ab592b73a07fd35d7ef92 100644
--- a/morphemic-persistent-storage/database/inputapi/src/activemqlistermanager.py
+++ b/morphemic-persistent-storage/database/inputapi/src/activemqlistermanager.py
@@ -11,11 +11,11 @@ class Listener(object):
self.handler = handler
self.start = time.time()
- def on_error(self, headers, message):
- print("received an error %s" % message)
+ def on_error(self, frame):
+ print("received an error %s" % frame.body)
- def on_message(self, headers, message):
- self.handler(message)
+ def on_message(self, frame):
+ self.handler(frame.body)
class Worker(Thread):
@@ -78,8 +78,8 @@ class ActiveMQManager:
def __init__(self, handler):
self.all_threads = []
self.handler = handler
- thread_controller = Thread(target=self.workerController)
- thread_controller.start()
+ self.thread_controller = Thread(target=self.workerController)
+ self.thread_controller.start()
def getData(self, data):
if data_format == "json":
@@ -96,8 +96,8 @@ class ActiveMQManager:
print("Worker controller started")
while True:
for w in self.all_threads:
- if w.getStatus() == "stopped" or w.getStatus() == "error":
- w.stop()
+ if not w.is_alive() or w.getStatus() == "stopped" or w.getStatus() == "error":
+ #w.stop()
print("Worker {0} will restart in 5 seconds".format(w.getIndex()))
time.sleep(5)
w.start()
diff --git a/morphemic-persistent-storage/database/inputapi/src/app.py b/morphemic-persistent-storage/database/inputapi/src/app.py
index 9b63405b9621fd1ee2e5a7fc54a76a82422ede99..3e7755b15cec9adbea8c101165866846cbae11fd 100644
--- a/morphemic-persistent-storage/database/inputapi/src/app.py
+++ b/morphemic-persistent-storage/database/inputapi/src/app.py
@@ -420,7 +420,7 @@ class Ingestor(Thread):
fields = json.loads(content)
except Exception as e:
print("Cannot decode json")
- print("content", content)
+ #print("content", content)
return False
# self.tolerance_manager.addTime(fields["application"], fields["timestamp"])
application = fields[metric_name_field_application]
@@ -468,7 +468,7 @@ class Ingestor(Thread):
except Exception as e:
print("An Error occur while inserting data point")
print(e)
- print("content", point)
+ #print("content", point)
return False
@@ -630,7 +630,6 @@ class InputApi:
self.handleRequest(_json)
return True
except Exception as e:
- print(e)
print("Non JSON content received")
return None
self.ingestor.addToList(data)
diff --git a/morphemic-persistent-storage/dataset-builder/datasets/demo.csv b/morphemic-persistent-storage/dataset-builder/datasets/demo.csv
index 81c70a4fbe295c5819783ebe9831989576304dd4..bb4309ff6ffbc9090b6a8e5403bb7e7fc477293d 100644
--- a/morphemic-persistent-storage/dataset-builder/datasets/demo.csv
+++ b/morphemic-persistent-storage/dataset-builder/datasets/demo.csv
@@ -1,258 +1,28 @@
time,cpu_usage,latency,level,memory,response_time
-1614276292,9.906976744186046,99.75999999999999,1,14.720930232558139,344
-1614276297,9.329113924050633,45.82,1,24.9873417721519,158
-1614276303,4.90625,74.24,1,21.71875,256
-1614276308,12.496503496503497,82.94,1,12.48951048951049,286
-1614276313,8.688172043010752,107.88,1,9.064516129032258,372
-1614276318,13.26797385620915,88.74,1,13.803921568627452,306
-1614276323,7.577319587628866,112.52,1,11.731958762886599,388
-1614276328,9.623188405797102,80.03999999999999,1,20.869565217391305,276
-1614276333,12.816793893129772,75.97999999999999,1,15.450381679389313,262
-1614276338,5.840909090909091,102.08,1,9.522727272727273,352
-1614276343,7.26797385620915,88.74,1,16.80392156862745,306
-1614276348,6.378378378378379,85.83999999999999,1,12.135135135135135,296
-1614276353,6.03030303030303,95.69999999999999,1,10.090909090909092,330
-1614276358,9.05050505050505,57.419999999999995,1,25.151515151515152,198
-1614276363,8.329113924050633,45.82,1,28.9873417721519,158
-1614276368,7.181347150259067,55.97,1,21.5440414507772,193
-1614276373,13.367003367003367,86.13,1,20.1010101010101,297
-1614276378,9.690036900369003,78.58999999999999,1,17.070110701107012,271
-1614276383,4.5188916876574305,115.13,1,8.556675062972293,397
-1614276388,10.246376811594203,40.019999999999996,1,25.73913043478261,138
-1614276393,10.5662100456621,63.51,1,16.698630136986303,219
-1614276398,12.620689655172415,33.64,1,29.862068965517242,116
-1614276403,6.219409282700422,68.72999999999999,1,21.658227848101266,237
-1614276408,14.8125,37.12,1,27.4375,128
-1614276413,8.754820936639119,105.27,1,10.264462809917354,363
-1614276418,10.52112676056338,82.36,1,16.56338028169014,284
-1614276423,3.6666666666666665,108.74999999999999,1,17.0,375
-1614276428,14.545454545454547,63.8,1,20.636363636363637,220
-1614276433,10.666666666666666,108.74999999999999,1,17.0,375
-1614276438,9.278688524590164,88.44999999999999,1,10.836065573770492,305
-1614276443,15.620689655172415,33.64,1,35.86206896551724,116
-1614276448,16.41025641025641,45.239999999999995,1,23.23076923076923,156
-1614276453,8.311258278145695,87.58,1,19.933774834437088,302
-1614276458,10.590673575129534,111.94,1,9.7720207253886,386
-1614276463,8.690036900369003,78.58999999999999,1,16.070110701107012,271
-1614276468,4.808988764044944,103.24,1,16.426966292134832,356
-1614276473,13.849557522123893,32.769999999999996,1,32.54867256637168,113
-1614276478,16.46268656716418,38.86,1,29.388059701492537,134
-1614276483,5.710027100271002,107.00999999999999,1,15.130081300813009,369
-1614276488,4.460207612456747,83.80999999999999,1,12.380622837370241,289
-1614276493,13.928571428571429,32.48,1,29.785714285714285,112
-1614276498,16.25925925925926,31.319999999999997,1,28.77777777777778,108
-1614276503,7.525252525252525,114.83999999999999,1,8.575757575757576,396
-1614276508,9.878048780487806,59.449999999999996,1,16.634146341463413,205
-1614276513,12.936507936507937,36.54,1,28.80952380952381,126
-1614276518,12.164556962025316,91.64,1,18.49367088607595,316
-1614276523,10.604166666666666,111.35999999999999,1,17.8125,384
-1614276528,11.702702702702702,107.3,1,13.108108108108109,370
-1614276533,9.34448160535117,86.71,1,14.033444816053512,299
-1614276538,12.407407407407408,39.15,1,30.22222222222222,135
-1614276543,7.739336492890995,61.19,1,21.218009478672986,211
-1614276548,10.012048192771084,96.27999999999999,1,14.036144578313253,332
-1614276553,7.944444444444445,41.76,1,30.833333333333332,144
-1614276558,9.444444444444445,65.25,1,16.333333333333336,225
-1614276563,10.91545189504373,99.47,1,10.746355685131196,343
-1614276568,10.19672131147541,35.379999999999995,1,26.59016393442623,122
-1614276573,17.352941176470587,39.44,1,31.058823529411764,136
-1614276578,10.832861189801699,102.36999999999999,1,14.498583569405099,353
-1614276583,4.322259136212624,87.28999999999999,1,15.966777408637874,301
-1614276588,8.703703703703704,78.3,1,12.11111111111111,270
-1614276593,8.976190476190476,97.44,1,18.92857142857143,336
-1614276598,5.840909090909091,102.08,1,14.522727272727273,352
-1614276604,19.0,28.999999999999996,1,40.0,100
-1614276609,6.25531914893617,68.14999999999999,1,20.76595744680851,235
-1614276614,8.703703703703704,78.3,1,18.11111111111111,270
-1614276619,10.577319587628866,112.52,1,12.731958762886599,388
-1614276624,12.215434083601286,90.19,1,16.646302250803856,311
-1614276629,10.5,115.99999999999999,1,12.5,400
-1614276634,9.219409282700422,68.72999999999999,1,13.658227848101266,237
-1614276639,6.291005291005291,54.809999999999995,1,23.873015873015873,189
-1614276644,13.115264797507788,93.08999999999999,1,12.345794392523365,321
-1614276649,4.604166666666666,111.35999999999999,1,16.8125,384
-1614276653,12.555555555555555,52.199999999999996,1,24.666666666666668,180
-1614276658,6.081632653061225,71.05,1,16.244897959183675,245
-1614276663,7.5188916876574305,115.13,1,12.556675062972293,397
-1614276668,7.25531914893617,68.14999999999999,1,22.76595744680851,235
-1614276673,12.754820936639119,105.27,1,11.264462809917354,363
-1614276678,10.347826086956522,66.69999999999999,1,18.043478260869563,230
-1614276683,13.636363636363637,79.75,1,20.909090909090907,275
-1614276688,7.105590062111801,93.38,1,11.316770186335404,322
-1614276693,9.531645569620252,114.55,1,16.594936708860757,395
-1614276698,9.41296928327645,84.97,1,16.238907849829353,293
-1614276703,10.115264797507788,93.08999999999999,1,16.345794392523366,321
-1614276708,11.597122302158272,80.61999999999999,1,18.79136690647482,278
-1614276713,7.62962962962963,62.63999999999999,1,16.88888888888889,216
-1614276718,13.278688524590164,88.44999999999999,1,15.836065573770492,305
-1614276723,4.289473684210526,88.16,1,11.868421052631579,304
-1614276728,12.184713375796179,91.05999999999999,1,19.554140127388536,314
-1614276733,9.98406374501992,72.78999999999999,1,19.95219123505976,251
-1614276738,12.424778761061948,65.53999999999999,1,17.27433628318584,226
-1614276743,8.846153846153847,75.39999999999999,1,21.53846153846154,260
-1614276748,11.493506493506494,44.66,1,24.48051948051948,154
-1614276753,5.67379679144385,108.46,1,13.02139037433155,374
-1614276758,14.299270072992702,39.73,1,28.8978102189781,137
-1614276763,18.620689655172413,33.64,1,29.862068965517242,116
-1614276768,8.3003300330033,87.86999999999999,1,18.900990099009903,303
-1614276773,11.717391304347826,106.72,1,11.152173913043478,368
-1614276778,14.369426751592357,45.529999999999994,1,20.10828025477707,157
-1614276783,10.201680672268907,69.02,1,15.605042016806722,238
-1614276788,6.347593582887701,54.23,1,22.0427807486631,187
-1614276793,6.546099290780141,81.78,1,18.638297872340424,282
-1614276798,7.434782608695652,53.36,1,20.304347826086957,184
-1614276803,7.645502645502646,109.61999999999999,1,14.936507936507937,378
-1614276808,10.508771929824562,82.64999999999999,1,18.526315789473685,285
-1614276813,4.773584905660377,76.85,1,14.320754716981131,265
-1614276818,11.128205128205128,56.55,1,23.384615384615387,195
-1614276823,13.128205128205128,56.55,1,22.384615384615387,195
-1614276828,8.289473684210526,88.16,1,17.86842105263158,304
-1614276833,8.545454545454547,63.8,1,22.636363636363637,220
-1614276838,11.448275862068964,84.1,1,17.344827586206897,290
-1614276843,10.030303030303031,95.69999999999999,1,13.090909090909092,330
-1614276848,5.745318352059925,77.42999999999999,1,21.235955056179776,267
-1614276853,12.264462809917354,35.089999999999996,1,33.79338842975207,121
-1614276858,14.154639175257731,56.26,1,25.463917525773198,194
-1614276863,6.555555555555555,52.199999999999996,1,19.666666666666668,180
-1614276868,11.906976744186046,99.75999999999999,1,17.72093023255814,344
-1614276873,5.03951367781155,95.41,1,16.118541033434653,329
-1614276878,12.91715976331361,49.01,1,27.75147928994083,169
-1614276883,9.944444444444445,41.76,1,30.833333333333332,144
-1614276888,14.424778761061948,65.53999999999999,1,16.27433628318584,226
-1614276893,9.578947368421051,44.08,1,23.736842105263158,152
-1614276899,7.181347150259067,55.97,1,21.5440414507772,193
-1614276904,4.610966057441253,111.07,1,16.83289817232376,383
-1614276909,10.448275862068964,84.1,1,18.344827586206897,290
-1614276914,11.608294930875577,62.92999999999999,1,20.824884792626726,217
-1614276919,7.225806451612903,89.89999999999999,1,15.67741935483871,310
-1614276924,12.367003367003367,86.13,1,13.1010101010101,297
-1614276929,12.847953216374268,49.589999999999996,1,26.54385964912281,171
-1614276934,9.064516129032258,35.96,1,32.193548387096776,124
-1614276939,9.846153846153847,75.39999999999999,1,15.538461538461538,260
-1614276944,8.91715976331361,49.01,1,21.75147928994083,169
-1614276949,9.777777777777779,104.39999999999999,1,12.333333333333334,360
-1614276954,14.608294930875577,62.92999999999999,1,16.824884792626726,217
-1614276959,8.714285714285715,50.75,1,20.142857142857142,175
-1614276964,4.638522427440633,109.91,1,13.915567282321899,379
-1614276969,9.389830508474576,85.55,1,15.169491525423728,295
-1614276974,11.3003300330033,87.86999999999999,1,16.900990099009903,303
-1614276979,14.993006993006993,41.47,1,27.97902097902098,143
-1614276984,11.816793893129772,75.97999999999999,1,12.450381679389313,262
-1614276989,13.584229390681003,80.91,1,15.75268817204301,279
-1614276994,9.097560975609756,47.559999999999995,1,21.29268292682927,164
-1614276999,11.237288135593221,68.44,1,21.71186440677966,236
-1614277004,14.714285714285715,50.75,1,18.142857142857142,175
-1614277009,11.181347150259068,55.97,1,17.5440414507772,193
-1614277014,8.558718861209965,81.49,1,14.676156583629894,281
-1614277019,4.610966057441253,111.07,1,16.83289817232376,383
-1614277024,8.367003367003367,86.13,1,19.1010101010101,297
-1614277029,8.802721088435373,42.629999999999995,1,24.408163265306122,147
-1614277034,13.504504504504505,64.38,1,22.513513513513516,222
-1614277039,13.064516129032258,35.96,1,31.193548387096776,124
-1614277044,6.716981132075472,61.48,1,24.150943396226417,212
-1614277049,5.610966057441253,111.07,1,12.832898172323759,383
-1614277054,8.787878787878789,76.55999999999999,1,12.363636363636363,264
-1614277059,6.531645569620253,114.55,1,9.594936708860759,395
-1614277064,12.73972602739726,105.85,1,11.219178082191782,365
-1614277069,7.389830508474576,85.55,1,14.169491525423728,295
-1614277074,7.149377593360996,69.89,1,19.448132780082986,241
-1614277079,12.291005291005291,54.809999999999995,1,20.873015873015873,189
-1614277083,17.936507936507937,36.54,1,30.80952380952381,126
-1614277088,4.617801047120419,110.77999999999999,1,9.853403141361255,382
-1614277093,7.086419753086419,93.96,1,16.25925925925926,324
-1614277098,5.832861189801699,102.36999999999999,1,13.498583569405099,353
-1614277103,11.246376811594203,40.019999999999996,1,25.73913043478261,138
-1614277108,12.322259136212624,87.28999999999999,1,17.966777408637874,301
-1614277113,5.067484662576687,94.53999999999999,1,10.202453987730062,326
-1614277118,10.710027100271002,107.00999999999999,1,12.130081300813009,369
-1614277123,14.615384615384615,30.159999999999997,1,30.846153846153847,104
-1614277128,7.1020408163265305,56.839999999999996,1,23.306122448979593,196
-1614277133,10.26797385620915,88.74,1,17.80392156862745,306
-1614277138,7.617801047120419,110.77999999999999,1,13.853403141361255,382
-1614277143,18.345794392523366,31.029999999999998,1,33.03738317757009,107
-1614277148,13.493506493506494,44.66,1,25.48051948051948,154
-1614277153,13.299270072992702,39.73,1,27.8978102189781,137
-1614277158,13.311258278145695,87.58,1,13.933774834437086,302
-1614277163,15.74712643678161,50.459999999999994,1,20.24137931034483,174
-1614277168,12.04225352112676,41.18,1,25.12676056338028,142
-1614277173,10.564102564102564,113.1,1,8.692307692307693,390
-1614277178,14.555555555555555,52.199999999999996,1,19.666666666666668,180
-1614277183,11.816793893129772,75.97999999999999,1,15.450381679389313,262
-1614277188,3.6595744680851063,109.03999999999999,1,9.97872340425532,376
-1614277193,12.717391304347826,106.72,1,11.152173913043478,368
-1614277198,11.824858757062147,102.66,1,17.474576271186443,354
-1614277203,12.03951367781155,95.41,1,12.118541033434651,329
-1614277209,8.310344827586206,67.28,1,14.931034482758621,232
-1614277214,10.62962962962963,62.63999999999999,1,22.88888888888889,216
-1614277219,10.950495049504951,58.58,1,23.851485148514854,202
-1614277224,3.881844380403458,100.63,1,13.645533141210375,347
-1614277229,14.474576271186441,34.22,1,33.42372881355932,118
-1614277234,10.236245954692556,89.61,1,18.70873786407767,309
-1614277239,12.291845493562231,67.57,1,13.875536480686696,233
-1614277244,11.003003003003002,96.57,1,11.00900900900901,333
-1614277249,7.610966057441253,111.07,1,17.83289817232376,383
-1614277254,9.04225352112676,41.18,1,24.12676056338028,142
-1614277259,9.154574132492113,91.92999999999999,1,14.46372239747634,317
-1614277264,4.590673575129534,111.94,1,8.7720207253886,386
-1614277269,13.032258064516128,71.92,1,15.096774193548388,248
-1614277274,4.739726027397261,105.85,1,17.21917808219178,365
-1614277279,8.205128205128204,90.47999999999999,1,19.615384615384613,312
-1614277284,7.257328990228013,89.02999999999999,1,19.77198697068404,307
-1614277289,10.299270072992702,39.73,1,28.8978102189781,137
-1614277294,12.329113924050633,45.82,1,24.9873417721519,158
-1614277299,6.132231404958677,70.17999999999999,1,20.396694214876035,242
-1614277304,5.6900369003690034,78.58999999999999,1,16.070110701107012,271
-1614277309,10.873563218390805,100.91999999999999,1,10.620689655172415,348
-1614277314,9.236245954692556,89.61,1,10.70873786407767,309
-1614277319,14.405286343612335,65.83,1,23.215859030837002,227
-1614277324,11.194244604316546,40.309999999999995,1,31.58273381294964,139
-1614277329,12.610108303249097,80.33,1,11.830324909747292,277
-1614277334,6.34448160535117,86.71,1,17.03344481605351,299
-1614277339,13.494505494505495,52.779999999999994,1,24.483516483516482,182
-1614277344,11.125,92.8,1,19.375,320
-1614277349,11.134969325153374,47.269999999999996,1,21.404907975460123,163
-1614277354,12.115264797507788,93.08999999999999,1,17.345794392523366,321
-1614277359,9.025125628140703,57.709999999999994,1,24.075376884422113,199
-1614277364,14.524886877828054,64.08999999999999,1,16.574660633484164,221
-1614277369,10.832861189801699,102.36999999999999,1,9.498583569405099,353
-1614277374,16.211180124223603,46.69,1,28.633540372670808,161
-1614277379,7.694835680751174,61.769999999999996,1,22.08450704225352,213
-1614277384,6.949852507374631,98.30999999999999,1,13.849557522123893,339
-1614277389,13.434782608695652,53.36,1,25.304347826086957,184
-1614277394,6.5188916876574305,115.13,1,16.556675062972293,397
-1614277399,8.367003367003367,86.13,1,13.1010101010101,297
-1614277404,13.291845493562231,67.57,1,20.875536480686698,233
-1614277409,5.777777777777778,104.39999999999999,1,17.333333333333336,360
-1614277414,8.11522633744856,70.47,1,20.345679012345677,243
-1614277419,12.857142857142858,101.5,1,15.571428571428571,350
-1614277424,5.784688995215311,60.60999999999999,1,15.354066985645932,209
-1614277429,11.076923076923077,94.25,1,10.23076923076923,325
-1614277434,13.58659217877095,51.91,1,17.75977653631285,179
-1614277439,10.881844380403457,100.63,1,13.645533141210375,347
-1614277444,17.0,28.999999999999996,1,39.0,100
-1614277449,12.900990099009901,29.29,1,35.70297029702971,101
-1614277454,12.832861189801699,102.36999999999999,1,18.4985835694051,353
-1614277459,11.451612903225806,44.949999999999996,1,27.35483870967742,155
-1614277464,10.090909090909092,31.9,1,37.27272727272727,110
-1614277469,15.333333333333334,34.8,1,35.0,120
-1614277474,18.695652173913043,33.349999999999994,1,32.086956521739125,115
-1614277479,13.021148036253777,95.99,1,13.06344410876133,331
-1614277484,9.132231404958677,70.17999999999999,1,18.396694214876035,242
-1614277489,10.745318352059925,77.42999999999999,1,15.235955056179776,267
-1614277494,13.424657534246576,84.67999999999999,1,15.273972602739725,292
-1614277499,11.575757575757576,38.279999999999994,1,25.727272727272727,132
-1614277504,5.652519893899204,109.33,1,13.957559681697614,377
-1614277509,5.787878787878788,76.55999999999999,1,14.363636363636363,264
-1614277515,14.854368932038835,59.739999999999995,1,19.563106796116507,206
-1614277520,8.524886877828054,64.08999999999999,1,14.574660633484163,221
-1614277525,8.967359050445104,97.72999999999999,1,10.90207715133531,337
-1614277530,7.2631578947368425,55.099999999999994,1,16.789473684210527,190
-1614277535,8.878048780487806,59.449999999999996,1,16.634146341463413,205
-1614277540,4.857142857142858,101.5,1,17.57142857142857,350
-1614277545,12.813953488372093,49.879999999999995,1,22.441860465116278,172
-1614277549,7.754820936639119,105.27,1,12.264462809917354,363
-1614277554,11.890173410404625,100.33999999999999,1,11.670520231213873,346
-1614277559,14.46268656716418,38.86,1,29.388059701492537,134
-1614277564,10.174311926605505,31.61,1,31.522935779816514,109
-1614277569,5.651162790697675,62.349999999999994,1,20.953488372093023,215
-1614277574,16.211180124223603,46.69,1,22.633540372670808,161
+1618826071,10.770083102493075,104.69,1,15.310249307479225,361
+1618826076,19.174311926605505,31.61,1,32.522935779816514,109
+1618826081,6.184100418410042,69.31,1,22.552301255230127,239
+1618826086,4.412969283276451,84.97,1,14.238907849829351,293
+1618826091,15.263157894736842,55.099999999999994,1,24.789473684210527,190
+1618826096,16.493506493506494,44.66,1,25.48051948051948,154
+1618826101,3.6455026455026456,109.61999999999999,1,15.936507936507937,378
+1618826106,5.424657534246576,84.67999999999999,1,16.273972602739725,292
+1618826111,16.25925925925926,31.319999999999997,1,36.77777777777778,108
+1618826116,10.900990099009901,29.29,1,35.70297029702971,101
+1618826121,10.025125628140703,57.709999999999994,1,25.075376884422113,199
+1618826126,5.695417789757412,107.58999999999999,1,16.086253369272235,371
+1618826131,10.710027100271002,107.00999999999999,1,13.130081300813009,369
+1618826136,13.184713375796179,91.05999999999999,1,13.554140127388536,314
+1618826141,5.205128205128205,90.47999999999999,1,10.615384615384615,312
+1618826146,4.448275862068965,84.1,1,16.344827586206897,290
+1618826151,11.928571428571429,32.48,1,29.785714285714285,112
+1618826156,15.433962264150944,30.74,1,37.301886792452834,106
+1618826161,15.874015748031496,36.83,1,25.62204724409449,127
+1618826166,11.201680672268907,69.02,1,17.60504201680672,238
+1618826171,9.205128205128204,90.47999999999999,1,14.615384615384615,312
+1618826176,5.436426116838488,84.39,1,16.309278350515463,291
+1618826181,10.807692307692307,60.31999999999999,1,24.423076923076923,208
+1618826186,5.90625,74.24,1,17.71875,256
+1618826191,15.090909090909092,31.9,1,29.272727272727273,110
+1618826196,8.949852507374631,98.30999999999999,1,13.849557522123893,339
+1618826201,15.62251655629139,43.79,1,26.867549668874172,151
diff --git a/morphemic-persistent-storage/docker-compose.yaml b/morphemic-persistent-storage/docker-compose.yaml
index 4bb8e453bcd1e5ed494d21062ca2884dc0fdc100..e38593b623294f74483a4cd7413b37b6b62fe2bf 100644
--- a/morphemic-persistent-storage/docker-compose.yaml
+++ b/morphemic-persistent-storage/docker-compose.yaml
@@ -10,6 +10,8 @@ services:
- "./database/.env"
volumes:
- "./database/data:/var/lib/influxdb"
+ ports:
+ - 8086:8086
publisher:
image: jdtotow/publisher
container_name: publisher
diff --git a/morphemic-persistent-storage/example/influxdb_querier.py b/morphemic-persistent-storage/example/influxdb_querier.py
index 079bc7f01cceeab639b260413ef9db79e0259338..5602d7cc7405b5120882178cec1d7df10313cfb6 100644
--- a/morphemic-persistent-storage/example/influxdb_querier.py
+++ b/morphemic-persistent-storage/example/influxdb_querier.py
@@ -8,7 +8,7 @@ url = "http://localhost:8086"
username = "morphemic"
password = "password"
database = "morphemic"
-application = 'application-1'
+application = 'demo'
params = '-sS'
headers = {'Accept': 'application/csv', 'Content-type': 'application/vnd.flux','Authorization': 'Token '+username+':'+password}
diff --git a/morphemic-persistent-storage/example/publisher.py b/morphemic-persistent-storage/example/publisher.py
index d1a4ff6bfe68cbff89a050c3e48eed02a109ffb6..b062483573177b3f2689394da99ba59beb4a7a25 100644
--- a/morphemic-persistent-storage/example/publisher.py
+++ b/morphemic-persistent-storage/example/publisher.py
@@ -10,8 +10,7 @@ password = os.getenv("ACTIVEMQ_PASSWORD") or "111"
host = os.getenv("ACTIVEMQ_HOST") or "localhost"
port = os.getenv("ACTIVEMQ_PORT") or 61613
application_name = os.environ.get("APPLICATION_NAME","demo")
-destination = sys.argv[1:2] or ["/queue/static-topic-1"]
-destination = destination[0]
+destination = "static-topic-1"
connected = False
conn = None
diff --git a/morphemic-persistent-storage/example/subscriber.py b/morphemic-persistent-storage/example/subscriber.py
index 8a5b0935056c58eb88fce65d73ab521e5daa98b6..d475a2c7b4d288ca7812219675b451b62c8bbdf9 100644
--- a/morphemic-persistent-storage/example/subscriber.py
+++ b/morphemic-persistent-storage/example/subscriber.py
@@ -9,8 +9,8 @@ user = os.getenv("ACTIVEMQ_USER") or "aaa"
password = os.getenv("ACTIVEMQ_PASSWORD") or "111"
host = os.getenv("ACTIVEMQ_HOST") or "localhost"
port = os.getenv("ACTIVEMQ_PORT") or 61613
-destination = sys.argv[1:2] or ["/queue/persistent_storage"]
-topic = 'tester'
+destination = sys.argv[1:2] or ["/topic/static-topic-1"]
+topic = 'static-topic-1'
destination = destination[0]
@@ -22,8 +22,8 @@ class Listener(object):
def on_error(self, headers, message):
print('received an error %s' % message)
- def on_message(self, headers, message):
- print(message)
+ def on_message(self, frame):
+ print(type(frame.body))
def sendSubscription():
conn = stomp.Connection(host_and_ports = [(host, port)])
@@ -43,7 +43,7 @@ def listen():
while True:
time.sleep(2)
-sendSubscription()
+#sendSubscription()
listen()
#curl -X POST -d '{"application":"application_test","start":"10m"}' -H 'Content-type:application/json' http://localhost:8767/api/v1/make
\ No newline at end of file
diff --git a/scheduling-abstraction-layer/pom.xml b/scheduling-abstraction-layer/pom.xml
index c1337e423c97f8b869c0ed1b9d701b85323580c8..d5e1256180357307d8c67bada3135d47f44f409b 100644
--- a/scheduling-abstraction-layer/pom.xml
+++ b/scheduling-abstraction-layer/pom.xml
@@ -23,12 +23,32 @@
rest-smartproxy
11.1.0-SNAPSHOT
compile
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
org.ow2.proactive_grid_cloud_portal
rest-api
11.1.0-SNAPSHOT
compile
+
+
+ org.slf4j
+ slf4j-log4j12
+
+
+ org.springframework
+ spring-expression
+
+
+ org.springframework
+ spring-core
+
+
commons-beanutils
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
index 169eebfbc8f3c5edacb765b14d4e564d60d8787d..f02b1d598b7eafc9833a25086cbd133b142b31ed 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/PAGateway.java
@@ -1,5 +1,6 @@
package org.activeeon.morphemic;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.activeeon.morphemic.application.deployment.PAFactory;
import org.activeeon.morphemic.application.deployment.PASchedulerGateway;
import org.activeeon.morphemic.infrastructure.deployment.PAConnectorIaasGateway;
@@ -8,9 +9,9 @@ import org.activeeon.morphemic.model.*;
import org.activeeon.morphemic.service.*;
import org.apache.commons.lang3.Validate;
import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONObject;
+import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent;
import org.ow2.proactive.resourcemanager.exception.RMException;
import org.ow2.proactive.scheduler.common.exception.NotConnectedException;
import org.ow2.proactive.scheduler.common.exception.UserException;
@@ -34,16 +35,21 @@ import java.security.KeyException;
import java.util.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
+import java.util.stream.IntStream;
public class PAGateway {
private final String paURL;
- public PAResourceManagerGateway resourceManagerGateway;
+ public final PAResourceManagerGateway resourceManagerGateway;
- public PASchedulerGateway schedulerGateway;
+ public final PASchedulerGateway schedulerGateway;
- public PAConnectorIaasGateway connectorIaasGateway;
+ public final PAConnectorIaasGateway connectorIaasGateway;
+
+ private final String NEW_LINE = System.getProperty("line.separator");
+
+ private final String SCRIPTS_SEPARATION = NEW_LINE + NEW_LINE + "# Main script" + NEW_LINE;
private static final Logger LOGGER = Logger.getLogger(PAGateway.class);
@@ -75,6 +81,7 @@ public class PAGateway {
/**
* Disconnect from the ProActive server
+ * @throws NotConnectedException In case the user is not connected
*/
public void disconnect() throws NotConnectedException {
LOGGER.debug("Disconnecting from RM...");
@@ -308,6 +315,45 @@ public class PAGateway {
return filteredNodeCandidates;
}
+ /**
+ * This function returns the number of available node candidates according to the added clouds
+ * @return the number of available node candidates
+ */
+ public int getLengthOfNodeCandidates() {
+ List allNodeCandidates = EntityManagerHelper.createQuery("SELECT nc FROM NodeCandidate nc",
+ NodeCandidate.class).getResultList();
+ return allNodeCandidates.size();
+ }
+
+ /**
+ * This function returns the list of all available images related to a registered cloud
+ * @param cloudID A valid cloud identifier
+ * @return A list of available images
+ */
+ public List getAllCloudImages(String cloudID) {
+ List filteredImages = new LinkedList<>();
+ List allImages = EntityManagerHelper.createQuery("SELECT img FROM Image img",
+ Image.class).getResultList();
+ PACloud paCloud = EntityManagerHelper.find(PACloud.class, cloudID);
+ JSONArray imagesArray = connectorIaasGateway.getImages(paCloud.getDummyInfrastructureName());
+ List imagesIDs = IntStream.range(0, imagesArray.length())
+ .mapToObj(imagesArray::get)
+ .map(blaBla -> ((JSONObject)blaBla).optString("id")).collect(Collectors.toList());
+ allImages.stream().filter(blaTest -> imagesIDs.contains(blaTest.getId())).forEach(filteredImages::add);
+ return filteredImages;
+ }
+
+ /**
+ * This function returns a list of available VMs
+ * @return rmNodeEvents a list of available Nodes and their associate parameters
+ * @throws NotConnectedException In case the user is not connected
+ * @throws PermissionRestException In case the user does not have valid permissions
+ */
+ public List getListOfActiveVMs() throws NotConnectedException, PermissionRestException {
+ List rmNodeEvents = resourceManagerGateway.getListOfNodesEvents();
+ return rmNodeEvents;
+ }
+
/**
* Define a node source in PA server related to a deployment information
* @param nodeSourceName A valid and unique node source name
@@ -366,6 +412,7 @@ public class PAGateway {
/**
* Add an EMS deployment to a defined job
+ * @param nodeNames Names of the nodes to which to add EMS deployment
* @param authorizationBearer The authorization bearer used by upperware's components to authenticate with each other. Needed by the EMS.
* @return return 0 if the deployment task is properly added.
*/
@@ -424,18 +471,20 @@ public class PAGateway {
cloud.addDeployedRegion(newDeployment.getLocationName(),
newDeployment.getLocationName() + "/" + newDeployment.getImageProviderId());
}
-
LOGGER.info("Node source defined.");
+ LOGGER.info("Trying to retrieve task: " + node.optString("taskName"));
+ Task task = EntityManagerHelper.find(Job.class, jobId).findTask(node.optString("taskName"));
+
newDeployment.setPaCloud(cloud);
+ newDeployment.setTask(task);
+ newDeployment.setNumber(task.getNextDeploymentID());
EntityManagerHelper.persist(newDeployment);
LOGGER.debug("Deployment created: " + newDeployment.toString());
EntityManagerHelper.persist(cloud);
LOGGER.info("Deployment added to the related cloud: " + cloud.toString());
- LOGGER.info("Trying to retrieve task: " + node.optString("taskName"));
- Task task = EntityManagerHelper.find(Job.class, jobId).findTask(node.optString("taskName"));
task.addDeployment(newDeployment);
EntityManagerHelper.persist(task);
});
@@ -465,6 +514,16 @@ public class PAGateway {
});
}
+ /**
+ * Get all registered clouds
+ * @return List of all table PACloud's entries
+ */
+ public List getAllClouds() {
+ return EntityManagerHelper.createQuery("SELECT pac FROM PACloud pac", PACloud.class).getResultList();
+ }
+
+
+
/**
* Remove clouds
* @param cloudIDs List of cloud IDs to remove
@@ -474,18 +533,36 @@ public class PAGateway {
EntityManagerHelper.begin();
cloudIDs.forEach(cloudID -> {
PACloud cloud = EntityManagerHelper.find(PACloud.class, cloudID);
+ LOGGER.info("Removing cloud : " + cloud.toString());
for (Map.Entry entry : cloud.getDeployedRegions().entrySet()) {
try {
- resourceManagerGateway.removeNodeSource(cloud.getNodeSourceNamePrefix() + entry.getKey(), preempt);
+ String nodeSourceName = cloud.getNodeSourceNamePrefix() + entry.getKey();
+ LOGGER.info("Removing node source " + nodeSourceName + " from the ProActive server.");
+ resourceManagerGateway.removeNodeSource(nodeSourceName, preempt);
} catch (NotConnectedException | PermissionRestException e) {
LOGGER.error(e.getStackTrace());
}
}
+ if (cloud.getDeployments() != null) {
+ LOGGER.info("Cleaning deployments from related tasks " + cloud.getDeployments().toString());
+ cloud.getDeployments().forEach(deployment -> deployment.getTask().removeDeployment(deployment));
+ }
+ LOGGER.info("Cleaning deployments from the cloud entry");
+ cloud.clearDeployments();
EntityManagerHelper.remove(cloud);
+ LOGGER.info("Cloud removed.");
});
EntityManagerHelper.commit();
}
+ /**
+ * Get all added nodes
+ * @return List of all table Deployment's entries
+ */
+ public List getAllNodes() {
+ return EntityManagerHelper.createQuery("SELECT d FROM Deployment d", Deployment.class).getResultList();
+ }
+
/**
* Remove nodes
* @param nodeNames List of node names to remove
@@ -494,7 +571,7 @@ public class PAGateway {
public void removeNodes(List nodeNames, Boolean preempt) {
nodeNames.forEach(nodeName -> {
try {
- String nodeUrl = resourceManagerGateway.searchNodes(nodeNames, true).get(0);
+ String nodeUrl = resourceManagerGateway.searchNodes(Collections.singletonList(nodeName), true).get(0);
resourceManagerGateway.removeNode(nodeUrl, preempt);
LOGGER.info("Node " + nodeName + " with URL: " + nodeUrl + " has been removed successfully.");
} catch (NotConnectedException | RestException e) {
@@ -598,8 +675,6 @@ public class PAGateway {
}
private List createCommandsTask(Task task, String taskNameSuffix, String taskToken, Job job) {
- final String newLine = System.getProperty("line.separator");
- final String scriptsSeparation = newLine + newLine + "# Main script" + newLine;
List scriptTasks = new LinkedList<>();
ScriptTask scriptTaskStart = null;
ScriptTask scriptTaskInstall = null;
@@ -616,7 +691,7 @@ public class PAGateway {
task.getInstallation().getPostInstall().isEmpty())) {
if (!task.getInstallation().getInstall().isEmpty()) {
scriptTaskInstall = PAFactory.createBashScriptTask(task.getName() + "_install" + taskNameSuffix,
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getInstall());
} else {
scriptTaskInstall = PAFactory.createBashScriptTask(task.getName() + "_install" + taskNameSuffix,
@@ -625,13 +700,13 @@ public class PAGateway {
if (!task.getInstallation().getPreInstall().isEmpty()) {
scriptTaskInstall.setPreScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPreInstall(),
"bash"));
}
if (!task.getInstallation().getPostInstall().isEmpty()) {
scriptTaskInstall.setPostScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPostInstall(),
"bash"));
}
@@ -647,7 +722,7 @@ public class PAGateway {
task.getInstallation().getPostStart().isEmpty())) {
if (!task.getInstallation().getStart().isEmpty()) {
scriptTaskStart = PAFactory.createBashScriptTask(task.getName() + "_start" + taskNameSuffix,
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getStart());
} else {
scriptTaskStart = PAFactory.createBashScriptTask(task.getName() + "_start" + taskNameSuffix,
@@ -656,13 +731,13 @@ public class PAGateway {
if (!task.getInstallation().getPreStart().isEmpty()) {
scriptTaskStart.setPreScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPreStart(),
"bash"));
}
if (!task.getInstallation().getPostStart().isEmpty()) {
scriptTaskStart.setPostScript(PAFactory.createSimpleScript(
- Utils.getContentWithFileName("export_env_var_script.sh") + scriptsSeparation +
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
task.getInstallation().getPostStart(),
"bash"));
}
@@ -696,7 +771,7 @@ public class PAGateway {
nodeConfigJson += "\"}";
} else {
try {
- nodeConfigJson += "\", \"portToOpens\": " + mapper.writeValueAsString(task.getPortsToOpen()) + "}";
+ nodeConfigJson += "\", \"portsToOpen\": " + mapper.writeValueAsString(task.getPortsToOpen()) + "}";
} catch (IOException e) {
LOGGER.error(e.getStackTrace());
}
@@ -736,8 +811,9 @@ public class PAGateway {
LOGGER.error(String.format("Job [%s] not found", jobId));
return 1;
}
+
// Let's find the task:
- Optional optTask = Optional.ofNullable(EntityManagerHelper.find(Task.class,optJob.get().findTask(taskName)));
+ Optional optTask = Optional.ofNullable(optJob.get().findTask(taskName));
if (!optTask.isPresent()) {
LOGGER.error(String.format("Task [%s] not found", taskName));
return 1;
@@ -750,30 +826,272 @@ public class PAGateway {
return 2;
}
- // Let's clone the deployment/node as needed.
+ // Saving suffix IDs of new nodes
+ List newNodesNumbers = new LinkedList<>();
+
+ // Let's clone the deployment/node as needed
Deployment oldDeployment = optDeployment.get();
nodeNames.stream().map(nodeName -> {
- Deployment newDeployment = new Deployment();
- newDeployment.setPaCloud(oldDeployment.getPaCloud());
- newDeployment.setNodeName(nodeName);
- newDeployment.setLocationName(oldDeployment.getLocationName());
- newDeployment.setIsDeployed(false);
- newDeployment.setImageProviderId(oldDeployment.getImageProviderId());
- newDeployment.setHardwareProviderId(oldDeployment.getHardwareProviderId());
- EmsDeploymentRequest newEmsDeploymentReq = oldDeployment.getEmsDeployment().clone(nodeName);
- newDeployment.setEmsDeployment(newEmsDeploymentReq);
- return newDeployment;
- }).forEach( deployment -> {
- optTask.get().addDeployment(deployment);
- EntityManagerHelper.persist(deployment.getEmsDeployment());
- EntityManagerHelper.persist(deployment);
- EntityManagerHelper.persist(optTask.get());
+ EmsDeploymentRequest newEmsDeploymentReq =
+ oldDeployment.getEmsDeployment() == null ? null : oldDeployment.getEmsDeployment().clone(nodeName);
+ return new Deployment(nodeName,
+ oldDeployment.getLocationName(),
+ oldDeployment.getImageProviderId(),
+ oldDeployment.getHardwareProviderId(),
+ newEmsDeploymentReq,
+ oldDeployment.getPaCloud(),
+ oldDeployment.getTask(),
+ false,
+ null,
+ null
+ );
+ })
+ .forEach(deployment -> {
+ // Persist new deployment data
+ deployment.setNumber(optTask.get().getNextDeploymentID());
+ newNodesNumbers.add(optTask.get().getNextDeploymentID());
+ optTask.get().addDeployment(deployment);
+ if (deployment.getEmsDeployment() != null) {
+ EntityManagerHelper.persist(deployment.getEmsDeployment());
+ }
+ deployment.getPaCloud().addDeployment(deployment);
+ EntityManagerHelper.persist(deployment);
+ EntityManagerHelper.persist(optTask.get());
+ EntityManagerHelper.persist(deployment.getPaCloud());
});
EntityManagerHelper.commit();
+
+ // Let's deploy the VMS
+ submitScalingOutJob(optJob.get(), taskName, newNodesNumbers);
+
return 0;
}
+ private void submitScalingOutJob(Job job, String scaledTaskName, List newNodesNumbers) {
+ EntityManagerHelper.refresh(job);
+ LOGGER.info("Task: " + scaledTaskName + " of job " + job.toString() + " to be scaled out.");
+
+ TaskFlowJob paJob = new TaskFlowJob();
+ paJob.setName(job.getName() + "_" + scaledTaskName + "_ScaleOut");
+ LOGGER.info("Job created: " + paJob.toString());
+
+ EntityManagerHelper.begin();
+
+ job.getTasks().forEach(task -> {
+ List scriptTasks = buildScalingOutPATask(task, job, scaledTaskName);
+
+ if (scriptTasks != null && !scriptTasks.isEmpty()) {
+ addAllScriptTasksToPAJob(paJob, task, scriptTasks);
+ EntityManagerHelper.persist(task);
+ }
+ });
+
+ setAllScalingOutMandatoryDependencies(paJob, job, scaledTaskName, newNodesNumbers);
+
+ paJob.setProjectName("Morphemic");
+
+ long submittedJobId = schedulerGateway.submit(paJob).longValue();
+ job.setSubmittedJobId(submittedJobId);
+
+ EntityManagerHelper.persist(job);
+ EntityManagerHelper.commit();
+ LOGGER.info("Scaling out of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
+ }
+
+ private void addAllScriptTasksToPAJob(TaskFlowJob paJob, Task task, List scriptTasks) {
+ scriptTasks.forEach(scriptTask -> {
+ try {
+ paJob.addTask(scriptTask);
+ } catch (UserException e) {
+ LOGGER.error("Task " + task.getName() + " could not be added due to: " + e.toString());
+ }
+ });
+ }
+
+ private void setAllScalingOutMandatoryDependencies(TaskFlowJob paJob, Job jobToSubmit, String scaledTaskName, List newNodesNumbers) {
+ jobToSubmit.getTasks().forEach(task -> {
+ if (task.getParentTasks() != null && !task.getParentTasks().isEmpty()) {
+ task.getParentTasks().forEach(parentTaskName -> {
+ paJob.getTasks().forEach(paTask -> {
+ paJob.getTasks().forEach(paParentTask -> {
+ if (paTask.getName().contains(task.getName()) && paParentTask.getName().contains(parentTaskName)) {
+ if (paParentTask.getName().contains(scaledTaskName)) {
+ if (newNodesNumbers.stream().anyMatch(entry -> paParentTask.getName().endsWith(entry.toString()))) {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ } else {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().startsWith("prepareInfra")) {
+ paTask.addDependence(paParentTask);
+ }
+ }
+ } else if (paTask.getName().contains(scaledTaskName)) {
+ if (newNodesNumbers.stream().anyMatch(entry -> paTask.getName().endsWith(entry.toString()))) {
+ if (paTask.getName().contains(task.getDeploymentFirstSubmittedTaskName()) &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ } else {
+ if (paTask.getName().startsWith("prepareInfra") &&
+ paParentTask.getName().contains(jobToSubmit.findTask(parentTaskName).getDeploymentLastSubmittedTaskName())) {
+ paTask.addDependence(paParentTask);
+ }
+ }
+ }
+ }
+ });
+ });
+ });
+ }
+ });
+ }
+
+ private List buildScalingOutPATask(Task task, Job job, String scaledTaskName) {
+ List scriptTasks = new LinkedList<>();
+ Task scaledTask = job.findTask(scaledTaskName);
+
+ if (scaledTask.getParentTasks().contains(task.getName())) {
+ // When the scaled task is a child the task to be built
+ LOGGER.info("Building task " + task.getName() + " as a parent of task " + scaledTaskName);
+ scriptTasks.addAll(createParentScaledTask(task, job));
+ } else {
+ // Using buildScalingInPATask because it handles all the remaining cases
+ LOGGER.info("Moving to building with buildScalingInPATask() method");
+ scriptTasks.addAll(buildScalingInPATask(task, job, scaledTaskName));
+ }
+
+ return scriptTasks;
+ }
+
+ private List createChildScaledTask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createScalingChildUpdateTask(task, suffix, token, job));
+ });
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ return scriptTasks;
+ }
+
+ private ScriptTask createScalingChildUpdateTask(Task task, String suffix, String token, Job job) {
+ ScriptTask scriptTaskUpdate = null;
+
+ Map taskVariablesMap = new HashMap<>();
+ //TODO: Taking into consideration multiple parent tasks with multiple communications
+ taskVariablesMap.put("requestedPortName", new TaskVariable("requestedPortName",
+ job.findTask(task.getParentTasks().get(0)).getPortsToOpen().get(0).getRequestedName()));
+
+ if (!task.getInstallation().getUpdateCmd().isEmpty()) {
+ scriptTaskUpdate = PAFactory.createBashScriptTask(task.getName() + "_update" + suffix,
+ Utils.getContentWithFileName("export_env_var_script.sh") + SCRIPTS_SEPARATION +
+ task.getInstallation().getUpdateCmd());
+ } else {
+ scriptTaskUpdate = PAFactory.createBashScriptTask(task.getName() + "_install" + suffix,
+ "echo \"Installation script is empty. Nothing to be executed.\"");
+ }
+
+ scriptTaskUpdate.setPreScript(PAFactory.createSimpleScriptFromFIle("collect_ip_addr_results.groovy",
+ "groovy"));
+
+ scriptTaskUpdate.setVariables(taskVariablesMap);
+ scriptTaskUpdate.addGenericInformation("NODE_ACCESS_TOKEN", token);
+
+ return scriptTaskUpdate;
+ }
+
+ private List buildScaledPATask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+
+ // Creating infra preparation task
+ scriptTasks.add(createInfraPreparationTask(task, suffix, token, job));
+ });
+
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+
+ task.getDeployments().stream().filter(deployment -> !deployment.getIsDeployed()).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createInfraTask(task, deployment, suffix, token));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(scriptTasks.size()-1).getName().substring(0, scriptTasks.get(scriptTasks.size()-1).getName().lastIndexOf("_")));
+ // If the infrastructure comes with the deployment of the EMS, we set it up.
+ Optional.ofNullable(deployment.getEmsDeployment()).ifPresent(emsDeploymentRequest -> scriptTasks.add(createEmsDeploymentTask(emsDeploymentRequest,suffix,token)));
+ LOGGER.info("Token added: " + token);
+ deployment.setIsDeployed(true);
+ deployment.setNodeAccessToken(token);
+
+ // Creating application deployment tasks
+ createAndAddAppDeploymentTasks(task, suffix, token, scriptTasks, job);
+ });
+
+ scriptTasks.forEach(scriptTask -> task.addSubmittedTaskName(scriptTask.getName()));
+
+ return scriptTasks;
+ }
+
+ private void createAndAddAppDeploymentTasks(Task task, String suffix, String token, List scriptTasks, Job job) {
+ List appTasks = createAppTasks(task, suffix, token, job);
+ task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
+
+ // Creating infra preparation task
+ appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
+ appTasks.get(1).addDependence(appTasks.get(0));
+
+ // Add dependency between infra and application deployment tasks
+ appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
+
+ scriptTasks.addAll(appTasks);
+ }
+
+ private List createParentScaledTask(Task task, Job job) {
+ List scriptTasks = new LinkedList<>();
+ task.getDeployments().stream().filter(Deployment::getIsDeployed).forEach(deployment -> {
+ // Creating infra deployment tasks
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
+ scriptTasks.add(createScalingParentInfraPreparationTask(task, suffix, token, job));
+ });
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ task.setDeploymentLastSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
+ return scriptTasks;
+ }
+
+ private ScriptTask createScalingParentInfraPreparationTask(Task task, String suffix, String token, Job job) {
+ ScriptTask prepareInfraTask;
+ Map taskVariablesMap = new HashMap<>();
+ String taskName = "parentPrepareInfra_" + task.getName() + suffix;
+
+ if (!task.getPortsToOpen().isEmpty()) {
+ prepareInfraTask = PAFactory.createGroovyScriptTaskFromFile(taskName, "post_prepare_infra_script.groovy");
+ prepareInfraTask.setPreScript(PAFactory.createSimpleScriptFromFIle("prepare_infra_script.sh",
+ "bash"));
+ //TODO: Taking into consideration multiple provided ports
+ taskVariablesMap.put("providedPortName", new TaskVariable("providedPortName",
+ task.getPortsToOpen().get(0).getRequestedName()));
+ taskVariablesMap.put("providedPortValue", new TaskVariable("providedPortValue",
+ task.getPortsToOpen().get(0).getValue().toString()));
+ } else {
+ prepareInfraTask = PAFactory.createBashScriptTask(taskName,
+ "echo \"No ports to open and not parent tasks. Nothing to be prepared in VM.\"");
+ }
+
+ prepareInfraTask.setVariables(taskVariablesMap);
+ prepareInfraTask.addGenericInformation("NODE_ACCESS_TOKEN", token);
+
+ return prepareInfraTask;
+ }
+
/**
* Unregister a set of node as a scale-down operation
* @param nodeNames A list of node to be removed
@@ -794,7 +1112,7 @@ public class PAGateway {
}
// Let's find the task:
- Optional optTask = Optional.ofNullable(EntityManagerHelper.find(Task.class,optJob.get().findTask(taskName)));
+ Optional optTask = Optional.ofNullable(optJob.get().findTask(taskName));
if (!optTask.isPresent()) {
LOGGER.error(String.format("Task [%s] not found", taskName));
return 1;
@@ -807,24 +1125,81 @@ public class PAGateway {
}
// For supplied node, I retrieve their deployment
- List deployments = nodeNames.stream().map(node -> EntityManagerHelper.find(Deployment.class,node)).filter(deployment -> (deployment != null)).collect(Collectors.toList());
+ List deployments = nodeNames.stream().map(node -> EntityManagerHelper.find(Deployment.class,node)).filter(Objects::nonNull).collect(Collectors.toList());
// For deployed node, I flag their removal
- List nodesToBeRemoved = deployments.stream().filter(deployment -> deployment.getIsDeployed()).map(Deployment::getNodeName).collect(Collectors.toList());
+ List nodesToBeRemoved = deployments.stream().filter(Deployment::getIsDeployed).map(Deployment::getNodeName).collect(Collectors.toList());
+ LOGGER.info("Nodes to be removed are : " + nodesToBeRemoved);
// For every node, I remove the deployment entree
deployments.forEach(
deployment -> {
+ deployment.getTask().removeDeployment(deployment);
+ EntityManagerHelper.persist(deployment.getTask());
+ deployment.getPaCloud().removeDeployment(deployment);
+ EntityManagerHelper.persist(deployment.getPaCloud());
EntityManagerHelper.remove(deployment);
- EntityManagerHelper.persist(deployment);
}
);
// I commit the removal of deployed node
- removeNodes(nodesToBeRemoved,false);
+ removeNodes(nodesToBeRemoved,true);
EntityManagerHelper.commit();
+
+ // Let's deploy the VMS
+ submitScalingInJob(optJob.get(), taskName);
+
return 0;
}
+ /**
+ * Build and submit the ProActive workflow implementing a scale-in of the given task.
+ * Only the tasks impacted by the scaling (the scaled task itself and its children)
+ * contribute script tasks to the submitted workflow.
+ * @param scaledTaskName The name of the Morphemic task being scaled in
+ * @param job The job that owns the scaled task
+ */
+ private void submitScalingInJob(Job job, String scaledTaskName) {
+ // Re-read the job so the deployment removals committed just before are visible.
+ EntityManagerHelper.refresh(job);
+ LOGGER.info("Task: " + scaledTaskName + " of job " + job.toString() + " to be scaled in.");
+
+ TaskFlowJob paJob = new TaskFlowJob();
+ paJob.setName(job.getName() + "_" + scaledTaskName + "_ScaleIn");
+ LOGGER.info("Job created: " + paJob.toString());
+
+ EntityManagerHelper.begin();
+
+ job.getTasks().forEach(task -> {
+ List scriptTasks = buildScalingInPATask(task, job, scaledTaskName);
+
+ // Tasks not impacted by the scaling yield no script tasks and are skipped.
+ if (scriptTasks != null && !scriptTasks.isEmpty()) {
+ addAllScriptTasksToPAJob(paJob, task, scriptTasks);
+ EntityManagerHelper.persist(task);
+ }
+ });
+
+ setAllMandatoryDependencies(paJob, job);
+
+ paJob.setProjectName("Morphemic");
+
+ long submittedJobId = schedulerGateway.submit(paJob).longValue();
+ job.setSubmittedJobId(submittedJobId);
+
+ EntityManagerHelper.persist(job);
+ EntityManagerHelper.commit();
+ // Fixed message: this is the scale-IN path (previously logged "Scaling out").
+ LOGGER.info("Scaling in of task \'" + scaledTaskName + "\' job, submitted successfully. ID = " + submittedJobId);
+ }
+
+ /**
+ * Translate one Morphemic task into ProActive script tasks for a scale-in operation.
+ * Only the scaled task and its direct children are rebuilt; unrelated tasks return
+ * an empty list.
+ * @param task The Morphemic task to translate
+ * @param job The job the task belongs to
+ * @param scaledTaskName The name of the task being scaled in
+ * @return The list of ProActive script tasks (empty when the task is not impacted)
+ */
+ private List buildScalingInPATask(Task task, Job job, String scaledTaskName) {
+ List scriptTasks = new LinkedList<>();
+
+ if (scaledTaskName.equals(task.getName())) {
+ // When the scaled task is the task to be built
+ // Fixed message: this builder serves the scale-IN path (previously logged "scaled out").
+ LOGGER.info("Building task " + task.getName() + " as it is scaled in");
+ scriptTasks.addAll(buildScaledPATask(task, job));
+ } else if (task.getParentTasks().contains(scaledTaskName)) {
+ // When the scaled task is a parent of the task to be built
+ LOGGER.info("Building task " + task.getName() + " as a child of task " + scaledTaskName);
+ scriptTasks.addAll(createChildScaledTask(task, job));
+ } else {
+ LOGGER.debug("Task " + task.getName() + " is not impacted by the scaling of task " + scaledTaskName);
+ }
+
+ return scriptTasks;
+ }
+
/**
* Translate a Morphemic task skeleton into a list of ProActive tasks
* @param task A Morphemic task skeleton
@@ -833,7 +1208,6 @@ public class PAGateway {
*/
public List buildPATask(Task task, Job job) {
List scriptTasks = new LinkedList<>();
- List tasksTokens = new LinkedList<>();
if (task.getDeployments() == null || task.getDeployments().isEmpty()) {
LOGGER.warn("The task " + task.getName() + " does not have a deployment. It will be scheduled on any free node.");
@@ -842,31 +1216,24 @@ public class PAGateway {
task.setDeploymentLastSubmittedTaskName(scriptTasks.get(scriptTasks.size()-1).getName());
}
else {
- task.getDeployments().forEach(deployment -> {
+ task.getDeployments().stream().filter(deployment -> !deployment.getIsDeployed()).forEach(deployment -> {
// Creating infra deployment tasks
- String token = task.getTaskId() + tasksTokens.size();
- String suffix = "_" + tasksTokens.size();
+ String token = task.getTaskId() + deployment.getNumber();
+ String suffix = "_" + deployment.getNumber();
scriptTasks.add(createInfraTask(task, deployment, suffix, token));
// If the infrastructure comes with the deployment of the EMS, we set it up.
Optional.ofNullable(deployment.getEmsDeployment()).ifPresent(emsDeploymentRequest -> scriptTasks.add(createEmsDeploymentTask(emsDeploymentRequest,suffix,token)));
- LOGGER.debug("Token added: " + token);
- tasksTokens.add(token);
+ LOGGER.info("Token added: " + token);
deployment.setIsDeployed(true);
+ deployment.setNodeAccessToken(token);
- // Creating application deployment tasks
- List appTasks = createAppTasks(task, suffix, token, job);
- task.setDeploymentLastSubmittedTaskName(appTasks.get(appTasks.size()-1).getName().substring(0, appTasks.get(appTasks.size()-1).getName().lastIndexOf(suffix)));
-
- // Creating infra preparation task
- appTasks.add(0, createInfraPreparationTask(task, suffix, token, job));
- appTasks.get(1).addDependence(appTasks.get(0));
+ LOGGER.info("+++ Deployment number: " + deployment.getNumber());
- // Add dependency between infra and application deployment tasks
- appTasks.get(0).addDependence(scriptTasks.get(scriptTasks.size()-1));
- scriptTasks.addAll(appTasks);
+ // Creating application deployment tasks
+ createAndAddAppDeploymentTasks(task, suffix, token, scriptTasks, job);
});
- task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_0")));
+ task.setDeploymentFirstSubmittedTaskName(scriptTasks.get(0).getName().substring(0, scriptTasks.get(0).getName().lastIndexOf("_")));
}
scriptTasks.forEach(scriptTask -> task.addSubmittedTaskName(scriptTask.getName()));
@@ -981,6 +1348,16 @@ public class PAGateway {
return(submittedJobId);
}
+ /**
+ * Stop the deployed job
+ * @param jobId A deployed job identifier
+ * @return The submitted stopping job id
+ */
+ public long stopJob(String jobId) {
+ //TODO: Submit stopping scripts for all apps
+ // NOTE(review): stub — currently ignores jobId and always returns 0 until
+ // the stopping workflow is implemented.
+ return 0L;
+ }
+
/**
* Get a ProActive job state
* @param jobId A job ID
@@ -1058,10 +1435,6 @@ public class PAGateway {
String.valueOf(submittedJob.getSubmittedJobId()),
submittedTaskName));
});
-
- TaskResult taskResult = schedulerGateway.getTaskResult(String.valueOf(submittedJob.getSubmittedJobId()),
- createdTask.getSubmittedTaskNames()
- .get(createdTask.getSubmittedTaskNames().size() - 1));
LOGGER.info("Results of task: " + taskName + " fetched successfully: " + taskResultsMap.toString());
return taskResultsMap;
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
index dba3f4e46f0d1114970524515a54bc0c74a9cfa0..0b81fb4261a216f6ad8e1a13b02294203327760d 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/application/deployment/PAFactory.java
@@ -180,6 +180,7 @@ public class PAFactory {
* @param scriptFileName The script implementation file name
* @param parameters The selection script parameters
* @return A ProActive SelectionScript instance
+ * @throws IOException In case an IOException is thrown
*/
public static SelectionScript createGroovySelectionScript(String scriptFileName, String[] parameters) throws IOException {
SelectionScript selectionScript = null;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
index 883866de7f8b53c8d220e53bd6d9f0b24618807e..b56d4d44761d29c4dcd776b38445c9acb41fb9e9 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/infrastructure/deployment/PAResourceManagerGateway.java
@@ -5,6 +5,7 @@ import org.apache.http.conn.ConnectTimeoutException;
import org.apache.log4j.Logger;
import org.ow2.proactive.resourcemanager.common.NSState;
import org.ow2.proactive.resourcemanager.common.event.RMNodeEvent;
+import org.ow2.proactive.resourcemanager.common.event.RMNodeSourceEvent;
import org.ow2.proactive.resourcemanager.common.event.dto.RMStateFull;
import org.ow2.proactive.resourcemanager.exception.RMException;
import org.ow2.proactive.resourcemanager.exception.RMNodeException;
@@ -86,6 +87,17 @@ public class PAResourceManagerGateway {
rmRestInterface = RMConnectionHelper.init(paURL);
}
+ /**
+ * Get the available VMs at the proactive server
+ * @return the list of the available VMs as node events
+ * @throws NotConnectedException In case the user is not connected
+ * @throws PermissionRestException In case the user does not have valid permissions
+ */
+ public List getListOfNodesEvents() throws NotConnectedException, PermissionRestException {
+ // Fetch the full RM state once and return only its node-event view.
+ RMStateFull fullState = rmRestInterface.getRMStateFull(RMConnectionHelper.getSessionId());
+ return fullState.getNodesEvents();
+ }
/**
* Connect to the ProActive server
@@ -94,7 +106,6 @@ public class PAResourceManagerGateway {
* @throws LoginException In case the login is not valid
* @throws KeyException In case the password is not valid
* @throws RMException In case an error happens in the RM
- * @throws NotConnectedException In case the session id is invalid
*/
public void connect(String username, String password) throws LoginException, KeyException, RMException {
RMConnectionHelper.connect(username,password);
@@ -184,6 +195,8 @@ public class PAResourceManagerGateway {
* @param all When true, the search return nodes which contain all tags;
* when false, the search return nodes which contain any tag among the list tags.
* @return the set of urls which match the search condition
+ * @throws NotConnectedException In case the user is not connected
+ * @throws RestException In case a Rest exception is thrown
*/
public List searchNodes(List tags, boolean all) throws NotConnectedException, RestException {
LOGGER.debug("Search for nodes ...");
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
index 48dea298211b426189660d2269d97b5f48f9b97b..d19b9f9b1382abefd0efee5cdc16f57f3592dccc 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Deployment.java
@@ -35,9 +35,18 @@ public class Deployment implements Serializable {
@ManyToOne(fetch = FetchType.EAGER, cascade = CascadeType.REFRESH)
private PACloud paCloud;
+ @ManyToOne(fetch = FetchType.LAZY, cascade = CascadeType.REFRESH)
+ private Task task;
+
@Column(name = "IS_DEPLOYED")
private Boolean isDeployed = false;
+ @Column(name = "NODE_ACCESS_TOKEN")
+ private String nodeAccessToken;
+
+ @Column(name = "NUMBER")
+ private Long number;
+
@Override
public String toString() {
return "Deployment{" +
@@ -45,6 +54,11 @@ public class Deployment implements Serializable {
", locationName='" + locationName + '\'' +
", imageProviderId='" + imageProviderId + '\'' +
", hardwareProviderId='" + hardwareProviderId + '\'' +
+ ", isDeployed='" + isDeployed.toString() + '\'' +
+ ", nodeAccessToken='" + nodeAccessToken + '\'' +
+ ", number='" + number + '\'' +
+ ", paCloud='" + paCloud.getNodeSourceNamePrefix() + '\'' +
+ ", task='" + task.getName() + '\'' +
'}';
}
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
index 7970f5088aee55be3deafb0dfc89b6d3340fa8a2..be83b1b439fd65fe7ce5858b7659a2aaa88882ef 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Job.java
@@ -1,8 +1,8 @@
package org.activeeon.morphemic.model;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.ObjectWriter;
import lombok.*;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.codehaus.jackson.map.ObjectWriter;
import javax.persistence.*;
import java.io.IOException;
@@ -46,6 +46,7 @@ public class Job implements Serializable {
* Transform a job into JSON format
*
* @return the JSON representation of the job
+ * @throws IOException In case an IOException is thrown
*/
public String getJobInJson() throws IOException{
ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
index f9ff1490d02b0c6d64310f9ae055b00443a764d9..c3899c47f84b279171caf2e8b6d8a20a15180b7f 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/NodeType.java
@@ -18,8 +18,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #IAAS_VALUE
- * @generated
- * @ordered
*/
IAAS(0, "IAAS", "IAAS"),
@@ -28,8 +26,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #PAAS_VALUE
- * @generated
- * @ordered
*/
PAAS(1, "PAAS", "PAAS"),
@@ -38,8 +34,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #FAAS_VALUE
- * @generated
- * @ordered
*/
FAAS(2, "FAAS", "FAAS"),
@@ -48,16 +42,12 @@ public enum NodeType implements Enumerator {
*
*
* @see #BYON_VALUE
- * @generated
- * @ordered
*/
BYON(3, "BYON", "BYON"), /**
* The 'SIMULATION' literal object.
*
*
* @see #SIMULATION_VALUE
- * @generated
- * @ordered
*/
SIMULATION(4, "SIMULATION", "SIMULATION");
@@ -70,9 +60,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #IAAS
- * @model
- * @generated
- * @ordered
*/
public static final int IAAS_VALUE = 0;
@@ -85,9 +72,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #PAAS
- * @model
- * @generated
- * @ordered
*/
public static final int PAAS_VALUE = 1;
@@ -100,9 +84,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #FAAS
- * @model
- * @generated
- * @ordered
*/
public static final int FAAS_VALUE = 2;
@@ -115,9 +96,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #BYON
- * @model
- * @generated
- * @ordered
*/
public static final int BYON_VALUE = 3;
@@ -130,9 +108,6 @@ public enum NodeType implements Enumerator {
*
*
* @see #SIMULATION
- * @model
- * @generated
- * @ordered
*/
public static final int SIMULATION_VALUE = 4;
@@ -140,7 +115,6 @@ public enum NodeType implements Enumerator {
* An array of all the 'Node Type' enumerators.
*
*
- * @generated
*/
private static final NodeType[] VALUES_ARRAY =
new NodeType[] {
@@ -155,7 +129,6 @@ public enum NodeType implements Enumerator {
* A public read-only list of all the 'Node Type' enumerators.
*
*
- * @generated
*/
public static final List VALUES = Collections.unmodifiableList(Arrays.asList(VALUES_ARRAY));
@@ -165,7 +138,6 @@ public enum NodeType implements Enumerator {
*
* @param literal the literal.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType get(String literal) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
@@ -183,7 +155,6 @@ public enum NodeType implements Enumerator {
*
* @param name the name.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType getByName(String name) {
for (int i = 0; i < VALUES_ARRAY.length; ++i) {
@@ -201,7 +172,6 @@ public enum NodeType implements Enumerator {
*
* @param value the integer value.
* @return the matching enumerator or null
.
- * @generated
*/
public static NodeType get(int value) {
switch (value) {
@@ -217,21 +187,18 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
private final int value;
/**
*
*
- * @generated
*/
private final String name;
/**
*
*
- * @generated
*/
private final String literal;
@@ -239,7 +206,6 @@ public enum NodeType implements Enumerator {
* Only this class can construct instances.
*
*
- * @generated
*/
private NodeType(int value, String name, String literal) {
this.value = value;
@@ -250,7 +216,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public int getValue() {
return value;
@@ -259,7 +224,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public String getName() {
return name;
@@ -268,7 +232,6 @@ public enum NodeType implements Enumerator {
/**
*
*
- * @generated
*/
public String getLiteral() {
return literal;
@@ -278,7 +241,6 @@ public enum NodeType implements Enumerator {
* Returns the literal value of the enumerator, which is its string representation.
*
*
- * @generated
*/
@Override
public String toString() {
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
index 09c986c9e8b106053790e3886a92e5e5ce7774ac..76b4bda1a0530e9e25b2823aef2bfa8b0b3474ec 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/PACloud.java
@@ -11,6 +11,7 @@ import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.stream.Collectors;
@AllArgsConstructor
@NoArgsConstructor
@@ -61,7 +62,7 @@ public class PACloud implements Serializable {
@ElementCollection(targetClass=String.class)
private Map deployedRegions;
- @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
+ @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.ALL)
private List deployments;
@OneToOne
@@ -74,6 +75,14 @@ public class PACloud implements Serializable {
deployments.add(deployment);
}
+ // Detach a deployment from this cloud; with orphanRemoval enabled on the
+ // mapping, the detached entity is deleted when the owning entity is flushed.
+ public void removeDeployment(Deployment deployment) {
+ deployments.remove(deployment);
+ }
+
+ // Detach every deployment from this cloud in one call (e.g. on cloud removal).
+ public void clearDeployments() {
+ deployments.clear();
+ }
+
public void addDeployedRegion(String region, String imageProviderId) {
if (deployedRegions==null){
deployedRegions = new HashMap<>();
@@ -87,6 +96,10 @@ public class PACloud implements Serializable {
@Override
public String toString() {
+ String deploymentsPrint = deployments == null ? "[]" : deployments.stream()
+ .map(Deployment::getNodeName)
+ .collect(Collectors.toList())
+ .toString();
return "PACloud{" +
"cloudID='" + cloudID + '\'' +
", nodeSourceNamePrefix='" + nodeSourceNamePrefix + '\'' +
@@ -94,6 +107,7 @@ public class PACloud implements Serializable {
", securityGroup='" + securityGroup + '\'' +
", dummyInfrastructureName='" + dummyInfrastructureName + '\'' +
", deployedRegions=" + deployedRegions +
+ ", deployments='" + deploymentsPrint + '\'' +
'}';
}
}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
index 17a3af06abf63bf88ad843bb003b8774f5ba18d1..5def6db2ed67d16afed309bc66865af6ad006f1c 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Port.java
@@ -23,6 +23,7 @@ public class Port implements Serializable {
@Column(name = "VALUE")
private Integer value;
+ @JsonIgnore
@Column(name = "REQUESTED_NAME")
private String requestedName;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
index 66dfccf70edefb17249fc73281f31bbdb8237fd3..cf926f03e90a8586f996f9ef37fd69b28f4e435d 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/model/Task.java
@@ -10,7 +10,6 @@ import java.util.LinkedList;
import java.util.List;
-@AllArgsConstructor
@NoArgsConstructor
@ToString
@Getter
@@ -34,7 +33,7 @@ public class Task implements Serializable {
@Embedded
private DockerEnvironment environment;
- @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
+ @OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.ALL)
private List deployments;
@OneToMany(fetch = FetchType.EAGER, orphanRemoval = true, cascade = CascadeType.REFRESH)
@@ -59,11 +58,19 @@ public class Task implements Serializable {
@Column(name = "DEPLOYMENT_LAST_SUBMITTED_TASK_NAME")
private String deploymentLastSubmittedTaskName;
+ @Column(name = "NEXT_DEPLOYMENT_ID")
+ private Long nextDeploymentID = 0L;
+
public void addDeployment(Deployment deployment) {
if (deployments==null){
deployments = new LinkedList<>();
}
deployments.add(deployment);
+ nextDeploymentID++;
+ }
+
+ public void removeDeployment(Deployment deployment) {
+ deployments.remove(deployment);
}
public void addSubmittedTaskName(String submittedTaskName) {
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
index 5bad81755b86bcbe729129bcb83eaa07287e560b..7d11f428bf1e62a14231765575fd1be730994e1f 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/EntityManagerHelper.java
@@ -1,9 +1,13 @@
package org.activeeon.morphemic.service;
+import org.apache.commons.configuration2.ex.ConfigurationException;
+import org.apache.log4j.Logger;
import org.hibernate.CacheMode;
import org.hibernate.jpa.QueryHints;
import javax.persistence.*;
+import java.util.HashMap;
+import java.util.Map;
public class EntityManagerHelper {
@@ -11,8 +15,28 @@ public class EntityManagerHelper {
private static final ThreadLocal threadLocal;
+ private static final Logger LOGGER = Logger.getLogger(EntityManagerHelper.class);
+
static {
- emf = Persistence.createEntityManagerFactory("model");
+ Map persistenceConfiguration = new HashMap<>();
+ try {
+ // Load the persistence configurations
+ persistenceConfiguration = PersistenceConfiguration.getAllPersistenceConfigurationPropertiesAsMap();
+ LOGGER.info("Initializing the persistence with custom configurations...");
+
+ } catch (ConfigurationException | NullPointerException e) {
+ /*
+ * In case the properties file was not found, this catch statement is triggered
+ * The EMF will be initialized with default configurations
+ *
+ * The NullPointerException is triggered if the environment variable to locate the properties file
+ * is not found
+ */
+ LOGGER.info("Initializing the persistence with default configurations...");
+ }
+
+ emf = Persistence.createEntityManagerFactory("model", persistenceConfiguration);
+ LOGGER.info("Initializing complete!");
threadLocal = new ThreadLocal();
}
@@ -76,4 +100,4 @@ public class EntityManagerHelper {
public static void commit() {
getEntityManager().getTransaction().commit();
}
-}
\ No newline at end of file
+}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
index b0a47cacc3536ba98c253c76266197a676623015..3f943daaf68da576119ccc16e570b9504c29e202 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/NodeCandidateUtils.java
@@ -1,10 +1,10 @@
package org.activeeon.morphemic.service;
+import com.fasterxml.jackson.databind.ObjectMapper;
import org.activeeon.morphemic.infrastructure.deployment.PAConnectorIaasGateway;
import org.activeeon.morphemic.model.*;
import org.apache.commons.lang3.StringUtils;
import org.apache.log4j.Logger;
-import org.codehaus.jackson.map.ObjectMapper;
import org.json.JSONArray;
import org.json.JSONObject;
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..f2b72ab57115b13117f0302fa55b7a9ac09fa1cc
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PathHelper.java
@@ -0,0 +1,17 @@
+package org.activeeon.morphemic.service;
+
+/**
+ * Resolves the location of the scheduling-abstraction-layer properties file
+ * from the MELODIC_CONFIG_DIR environment variable.
+ */
+public class PathHelper {
+
+ private final static String propertiesFileName = "eu.morphemic.schedulingAbstractionLayer.properties";
+
+ // Environment Variable Name = MELODIC_CONFIG_DIR
+ private final static String propertiesFileEnvironmentVariableName = "MELODIC_CONFIG_DIR";
+
+ /**
+ * Build the absolute path of the persistence properties file.
+ * NOTE(review): when MELODIC_CONFIG_DIR is unset, System.getenv returns null and
+ * the endsWith call below throws a NullPointerException; EntityManagerHelper
+ * deliberately catches that NPE to fall back to default persistence settings,
+ * so do not "fix" this without adjusting the caller.
+ * @return the full path to the properties file
+ */
+ public static String getPersistencePropertiesFilePath(){
+ String path = System.getenv(propertiesFileEnvironmentVariableName);
+ // Strip a trailing '/' so the filename is appended with exactly one separator.
+ if(path.endsWith("/")){
+ path = path.substring(0, path.length() - 1);
+ }
+ return path+"/"+propertiesFileName;
+ }
+}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
new file mode 100644
index 0000000000000000000000000000000000000000..e8231b259ab080896ccb6862acbaa5eb4fc2e959
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/PersistenceConfiguration.java
@@ -0,0 +1,59 @@
+package org.activeeon.morphemic.service;
+
+import org.apache.commons.configuration2.Configuration;
+import org.apache.commons.configuration2.PropertiesConfiguration;
+import org.apache.commons.configuration2.builder.FileBasedConfigurationBuilder;
+import org.apache.commons.configuration2.builder.fluent.Parameters;
+import org.apache.commons.configuration2.builder.fluent.PropertiesBuilderParameters;
+import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
+import org.apache.commons.configuration2.convert.ListDelimiterHandler;
+import org.apache.commons.configuration2.ex.ConfigurationException;
+import org.apache.log4j.Logger;
+
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Loads the SAL persistence (JPA) connection settings from the properties file
+ * located via {@link PathHelper} and exposes them as javax.persistence properties.
+ */
+public class PersistenceConfiguration {
+
+ // ';' separates multi-valued entries in the configuration file.
+ private static final ListDelimiterHandler DELIMITER = new DefaultListDelimiterHandler(';');
+
+ private static final Logger LOGGER = Logger.getLogger(PersistenceConfiguration.class);
+
+ // Property keys expected in the SAL properties file.
+ // Declared final: these are constants and must not be reassignable at runtime.
+ public static final String PERSISTENCE_URL = "sal.persistence.url";
+
+ public static final String PERSISTENCE_USERNAME = "sal.persistence.username";
+
+ public static final String PERSISTENCE_PASSWORD = "sal.persistence.password";
+
+ /**
+ * Load and parse the SAL persistence properties file.
+ * @return The parsed configuration
+ * @throws ConfigurationException In case the properties file cannot be read or parsed
+ */
+ public static Configuration loadPersistenceConfiguration () throws ConfigurationException {
+ // Local renamed from UPPER_SNAKE_CASE: it is a local variable, not a constant.
+ String propertiesFilePath = PathHelper.getPersistencePropertiesFilePath();
+ PropertiesBuilderParameters propertyParameters = new Parameters().properties();
+ propertyParameters.setPath(propertiesFilePath);
+ // Fail fast on a missing required property instead of silently returning null.
+ propertyParameters.setThrowExceptionOnMissing(true);
+ propertyParameters.setListDelimiterHandler(DELIMITER);
+
+ FileBasedConfigurationBuilder builder = new FileBasedConfigurationBuilder<>(PropertiesConfiguration.class);
+
+ builder.configure(propertyParameters);
+
+ LOGGER.debug("Persistence configuration loaded");
+
+ return builder.getConfiguration();
+ }
+
+ /**
+ * Map the SAL persistence properties onto the standard javax.persistence JDBC keys
+ * accepted by Persistence.createEntityManagerFactory.
+ * @return A map of javax.persistence.jdbc.* properties (url, user, password)
+ * @throws ConfigurationException In case the properties file cannot be loaded
+ */
+ public static Map getAllPersistenceConfigurationPropertiesAsMap() throws ConfigurationException {
+ final String JAVAX_URL_PROP = "javax.persistence.jdbc.url";
+ final String JAVAX_USERNAME_PROP = "javax.persistence.jdbc.user";
+ final String JAVAX_PASSWORD_PROP = "javax.persistence.jdbc.password";
+
+ Map persistenceProperties = new HashMap<>();
+
+ Configuration persistenceConfiguration = loadPersistenceConfiguration();
+
+ persistenceProperties.put(JAVAX_URL_PROP, persistenceConfiguration.getString(PERSISTENCE_URL));
+ persistenceProperties.put(JAVAX_USERNAME_PROP, persistenceConfiguration.getString(PERSISTENCE_USERNAME));
+ persistenceProperties.put(JAVAX_PASSWORD_PROP, persistenceConfiguration.getString(PERSISTENCE_PASSWORD));
+
+ return persistenceProperties;
+ }
+}
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
index 29c7cf38b83c5d2b71b9457a3893bb02c3490c15..29ca69c4f007b40a5a8653042b118b9117d408c1 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/RMConnectionHelper.java
@@ -33,10 +33,12 @@ public class RMConnectionHelper {
* @return The initialized RM Interface to be used for sending request to the platform
*/
public static RMRestInterface init(String paURL) {
- if(paURL.contains("trydev2")){
+ if(paURL.contains("trydev2.activeeon")){
sessionPreferencesId = "RM_sessionId_trydev2";
- }else{
+ }else if(paURL.contains("trydev.activeeon")){
sessionPreferencesId = "RM_sessionId";
+ }else{
+ sessionPreferencesId = "TESTING_PREF";
}
// Initialize the client
rmRestInterface = new RMRestClient(paURL + RESOURCE_MANAGER_REST_PATH, null).getRm();
@@ -53,7 +55,6 @@ public class RMConnectionHelper {
*
* @param username Username
* @param password Password
- * @return The user session ID
* @throws LoginException In case the login is not valid
* @throws KeyException In case the password is not valid
* @throws RMException In case an error happens in the RM
diff --git a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
index 6efe9c5fb4a55fb91e5ce81a41330ecbb176f50f..77ee0d9af992d2b2f30539be5b703313b042720c 100644
--- a/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
+++ b/scheduling-abstraction-layer/src/main/java/org/activeeon/morphemic/service/SchedulerConnectionHelper.java
@@ -60,9 +60,8 @@ public class SchedulerConnectionHelper {
}
/**
- *
* Disconnect from the Scheduler
- *
+ * @return The disconnected Scheduler gateway
*/
public static synchronized RestSmartProxyImpl disconnect() {
try {
diff --git a/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy b/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy
new file mode 100644
index 0000000000000000000000000000000000000000..7d4e51644532918dd100cca81f0dc67624234c6a
--- /dev/null
+++ b/scheduling-abstraction-layer/src/main/resources/collect_ip_addr_results.groovy
@@ -0,0 +1,18 @@
+// Aggregate every workflow variable whose key contains the requested port name
+// into one comma-separated string, then republish it under the requested name.
+def requestedPortName = variables.get("requestedPortName")
+def matchingValues = []
+
+variables.each { key, value ->
+ if (key.contains(requestedPortName)) {
+ matchingValues << value.toString()
+ }
+}
+
+// No match leaves the result null, exactly like the original counter-based build-up.
+def publicRequestedPort = matchingValues.isEmpty() ? null : matchingValues.join(",")
+
+println "publicRequestedPort: " + publicRequestedPort
+variables.put(requestedPortName, publicRequestedPort)
\ No newline at end of file
diff --git a/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy b/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
index 71962fa5996f2f56f802325f10dec362c57ef2d7..b590df29efee2e83fe9ef746d5e13790cbfb9679 100644
--- a/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
+++ b/scheduling-abstraction-layer/src/main/resources/post_prepare_infra_script.groovy
@@ -5,5 +5,6 @@ if (providedPortName?.trim()){
def ipAddr = new File(providedPortName+"_ip").text.trim()
def publicProvidedPort = ipAddr + ":" + providedPortValue
variables.put(providedPortName, publicProvidedPort)
+ variables.put(providedPortName + variables.get("PA_TASK_ID"), publicProvidedPort)
println("Provided variable " + providedPortName + "=" + publicProvidedPort)
}
\ No newline at end of file
diff --git a/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java b/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
index 653292840c8a3c45f57ac7c28d632f56179524cc..57cea96eabd7966195e0bfedbdc9e2ec7d35b46a 100644
--- a/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
+++ b/scheduling-abstraction-layer/src/test/java/org/activeeon/morphemic/service/RMConnectionHelperTest.java
@@ -14,6 +14,7 @@ import org.ow2.proactive_grid_cloud_portal.common.RMRestInterface;
import javax.security.auth.login.LoginException;
import java.security.KeyException;
+import java.util.prefs.Preferences;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.*;
@@ -27,6 +28,8 @@ class RMConnectionHelperTest {
private final String DUMMY_SESSION_ID = "SESSION_ID";
+ private final Preferences userPreferences = Preferences.userRoot().node("USER_PREFERENCES");
+
@Mock
RMRestInterface rmRestInterface;
@@ -58,10 +61,7 @@ class RMConnectionHelperTest {
// Inject the current mocked RM Interface to the RMConnectionHelper class
RMConnectionHelper.setRmRestInterface(rmRestInterface);
- // Initialize a testing user preference variable
- // It is used to store the session
- String DUMMY_PREFERENCE_ID = "TESTING_PREF";
- RMConnectionHelper.setSessionPreferencesId(DUMMY_PREFERENCE_ID);
+ userPreferences.remove("TESTING_PREF");
// Temporary disable all Logging from the RMConnectionHelper class
// It is enabled after the tests are completed
@@ -79,7 +79,7 @@ class RMConnectionHelperTest {
RMRestInterface initRMRestInterface = RMConnectionHelper.init(DUMMY_CONNECTION_URL);
String DUMMY_RM_INTERFACE_MESSAGE = "org.ow2.proactive_grid_cloud_portal.common.RMRestInterface";
assertTrue(initRMRestInterface.toString().contains(DUMMY_RM_INTERFACE_MESSAGE));
- assertEquals(RMConnectionHelper.getSessionId(), "");
+ assertEquals("", RMConnectionHelper.getSessionId());
}
/**
@@ -94,6 +94,7 @@ class RMConnectionHelperTest {
*/
@Test
void connect() throws LoginException, KeyException, RMException {
+ RMConnectionHelper.setSessionPreferencesId("TESTING_PREF");
RMConnectionHelper.connect(DUMMY_USERNAME,DUMMY_PASSWORD);
assertEquals(DUMMY_SESSION_ID,RMConnectionHelper.getSessionId());
}
@@ -104,6 +105,7 @@ class RMConnectionHelperTest {
*/
@Test
void disconnect() {
+ RMConnectionHelper.setSessionPreferencesId("TESTING_PREF");
RMConnectionHelper.disconnect();
assertEquals("", RMConnectionHelper.getSessionId());
}
@@ -124,5 +126,6 @@ class RMConnectionHelperTest {
@AfterEach
void enableLogging() {
Logger.getLogger(RMConnectionHelper.class).setLevel(Level.ALL);
+ userPreferences.remove("TESTING_PREF");
}
}
\ No newline at end of file