First commit

Change-Id: Ibab69eb96bed65135dba0678964a0fd11d264dce
parent: ea99bf0611
commit: b20b1d6b77
@@ -33,18 +33,20 @@ spec:
             {{- toYaml .Values.securityContext | nindent 12 }}
           image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
           imagePullPolicy: {{ .Values.image.pullPolicy }}
-          ports:
-            - name: http
-              containerPort: 8080
-              protocol: TCP
+          # ports:
+          #   - name: http
+          #     containerPort: 8080
+          #     protocol: TCP
           livenessProbe:
-            httpGet:
-              path: /
-              port: http
+            exec:
+              command:
+                - ls
+                - /home
           readinessProbe:
-            httpGet:
-              path: /
-              port: http
+            exec:
+              command:
+                - ls
+                - /home
           resources:
             {{- toYaml .Values.resources | nindent 12 }}
           {{- with .Values.nodeSelector }}
@@ -5,7 +5,7 @@
 replicaCount: 1
 
 image:
-  repository: "quay.io/nebulous/slo-violation-detector-java-spring-boot-demo"
+  repository: "quay.io/nebulous/slo-violation-detector"
   pullPolicy: IfNotPresent
   # Overrides the image tag whose default is the chart appVersion.
   tag: ""
java-spring-boot-demo/.gitignore (vendored, deleted, 33 lines)
@@ -1,33 +0,0 @@
HELP.md
target/
!.mvn/wrapper/maven-wrapper.jar
!**/src/main/**/target/
!**/src/test/**/target/

### STS ###
.apt_generated
.classpath
.factorypath
.project
.settings
.springBeans
.sts4-cache

### IntelliJ IDEA ###
.idea
*.iws
*.iml
*.ipr

### NetBeans ###
/nbproject/private/
/nbbuild/
/dist/
/nbdist/
/.nb-gradle/
build/
!**/src/main/**/build/
!**/src/test/**/build/

### VS Code ###
.vscode/
@@ -1,15 +0,0 @@
#
# Build stage
#
FROM docker.io/library/maven:3.9.2-eclipse-temurin-17 AS build
COPY src /home/app/src
COPY pom.xml /home/app
RUN mvn -f /home/app/pom.xml clean package

#
# Package stage
#
FROM docker.io/library/eclipse-temurin:17-jre
COPY --from=build /home/app/target/demo-0.0.1-SNAPSHOT.jar /usr/local/lib/demo.jar
EXPOSE 8080
ENTRYPOINT ["java","-jar","/usr/local/lib/demo.jar"]
@@ -1,42 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.1.0</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.example</groupId>
    <artifactId>demo</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>demo</name>
    <description>Demo project for Spring Boot</description>
    <properties>
        <java.version>17</java.version>
    </properties>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>
@@ -1,13 +0,0 @@
package com.example.demo;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class DemoApplication {

    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }

}
@@ -1,14 +0,0 @@
package com.example.demo;

import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class DemoController {

    @RequestMapping("/")
    public Object root() {
        return null;
    }

}
@@ -1,13 +0,0 @@
package com.example.demo;

import org.junit.jupiter.api.Test;
import org.springframework.boot.test.context.SpringBootTest;

@SpringBootTest
class DemoApplicationTests {

    @Test
    void contextLoads() {
    }

}
slo-violation-detector/Dockerfile (new file, 22 lines)
@@ -0,0 +1,22 @@
# Copyright (c) 2023 Institute of Communication and Computer Systems
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.

#
# Build stage
#
FROM docker.io/library/maven:3.9.2-eclipse-temurin-17 AS build
COPY src /home/app/src
COPY pom.xml /home/app
RUN mvn -f /home/app/pom.xml -DfailIfNoTests=false -Dtest=!UnboundedMonitoringAttributeTests,!ConnectivityTests clean package

FROM docker.io/library/eclipse-temurin:17
RUN mkdir -p /home/src/main/resources/
COPY src/main/resources/config/eu.nebulous.slo_violation_detector.properties /home/src/main/resources/config/input_data.properties
COPY src/main/resources/config/eu.melodic.event.brokerclient.properties /home/src/main/resources/config/eu.melodic.event.brokerclient.properties
COPY --from=build /home/app/target/SLO-Violation-Detector-4.0-SNAPSHOT.jar /home/SLOSeverityCalculator-4.0-SNAPSHOT.jar
WORKDIR /home
ENV LOG_FILE /home/slo_violation_detector.log
CMD ["/bin/sh","-c","java -jar SLOSeverityCalculator-4.0-SNAPSHOT.jar > $LOG_FILE 2>&1"]
slo-violation-detector/README.md (new file, 143 lines)
@@ -0,0 +1,143 @@
# SLO Severity-based Violation Detector

## Introduction

The SLO Severity-based Violation Detector is a component which receives predicted and actual monitoring metric values and produces AMQP messages which denote i) the calculated severity, ii) the probability that a reconfiguration will be required, and iii) the timestamp to which the calculation refers.

The component can run either using a compiled jar file, or be packaged into a Docker container and run in containerized form.

## Configuration

### Configuration file

The component comes with a configuration file which can be used to specify the behaviour of the component (eu.morphemic.slo_violation_detector.properties) and a configuration file which is used to configure the AMQP communication (eu.melodic.event.brokerclient.properties). These files are located in the src/main/resources/config directory of the project.

The principal configuration options to be changed before deployment are in the eu.morphemic.slo_violation_detector.properties file, which is passed as a runtime argument to the component. They are the `broker_address` (along with the `broker_username` and `broker_password`), the `horizon`, the `number_of_days_to_aggregate_data_from` and the `number_of_seconds_to_aggregate_on`.

| Parameter | Description | Indicative value |
| --- | --- | --- |
| metrics_bounds | A string value which is a csv list of monitoring metrics and the respective upper and lower bounds which are known for them beforehand. The list contains comma-separated triplets, while the elements of a triplet are separated with semicolons. Each triplet contains the name of the metric, its lowest bound and its highest bound (or the word 'unbounded' if these are not known). If a monitoring metric is not registered here, it will be assumed that it can be assigned any real value from 0 (the lowest bound) to 100 (the highest bound). | avgResponseTime;unbounded;unbounded,custom2;0;3 |
| slo_rules_topic | A string value indicating the name of the topic which will be used to send messages (to the SLO Violation Detector) containing the SLOs which should be respected by the application. | metrics.metric_list |
| broker_ip_url | A string value indicating the url which should be used to connect to the AMQP broker to send and receive messages. | tcp://localhost:61616?wireFormat.maxInactivityDuration=0 |
| broker_username | A string value, which is the username to access the AMQP broker. | User1 |
| broker_password | A string value, which is the password to access the AMQP broker. | userpassword |
| slo_violation_determination_method | A string value indicating the method which is used to determine the probability of a possible future SLO violation. A choice is offered between all-metrics and prconf-delta. | all-metrics |
| time_horizon_seconds | An integer value indicating the minimum time interval between two successive reconfigurations that the platform can support. | 900 |
| maximum_acceptable_forward_predictions | An integer value indicating the maximum number of forward predictions for which the component will keep data. | 30 |

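For reference, these keys are read with standard `java.util.Properties` at startup (mirroring the pattern used in `runtime/Main.java`). The snippet below is only an illustrative sketch, and the file path shown is an assumption; in the real component the configuration file location is passed as a runtime argument or taken from `configuration.Constants`.

```java
import java.io.FileInputStream;
import java.util.Properties;

public class ConfigExample {
    public static void main(String[] args) throws Exception {
        Properties prop = new Properties();
        // Example path only; adjust to the properties file actually deployed with the component.
        try (FileInputStream in = new FileInputStream("src/main/resources/config/eu.nebulous.slo_violation_detector.properties")) {
            prop.load(in);
        }
        String sloRulesTopic = prop.getProperty("slo_rules_topic");   // e.g. metrics.metric_list
        String brokerUrl = prop.getProperty("broker_ip_url");         // e.g. tcp://localhost:61616?...
        int timeHorizonSeconds = Integer.parseInt(prop.getProperty("time_horizon_seconds", "900"));
        System.out.println("Listening for SLO rules on " + sloRulesTopic + " via " + brokerUrl
                + ", horizon=" + timeHorizonSeconds + "s");
    }
}
```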
## Component input

The triggering input of the SLO Violation Detector is a JSON message which informs the component about the SLOs which should be respected. The format of these SLOs is the following:

\<SLO\> ← \<Metric\> \<Operator\> \<Threshold\>

**Metric**: Any monitoring attribute which can be observed using the EMS can be used in the formulation of an SLO.

**Operator**: Either greater than, greater than or equal, less than, or less than or equal.

**Threshold**: We assume that metric values used in the description of SLOs are real numbers, so any real number which can be handled by Java 9 can be used.

Multiple SLOs can be joined using an 'AND'- or 'OR'-separated syntax. Examples of AND- and OR-separated SLOs appear below:

#### A simple SLO rule

```json
{
  "name": "_",
  "operator": "OR",
  "constraints": [
    {
      "name": "cpu_usage_too_high",
      "metric": "cpu_usage",
      "operator": ">",
      "threshold": 80
    }
  ]
}
```

#### A complex SLO rule

```json
{
  "name": "_",
  "operator": "OR",
  "constraints": [
    {
      "name": "cpu_and_memory_or_swap_too_high",
      "operator": "AND",
      "constraints": [
        {
          "name": "cpu_usage_high",
          "metric": "cpu_usage",
          "operator": ">",
          "threshold": 80
        },
        {
          "name": "memory_or_swap_usage_high",
          "operator": "OR",
          "constraints": [
            {
              "name": "memory_usage_high",
              "metric": "ram_usage",
              "operator": ">",
              "threshold": 70
            },
            {
              "name": "disk_usage_high",
              "metric": "swap_usage",
              "operator": ">",
              "threshold": 50
            }
          ]
        }
      ]
    }
  ]
}
```

The simple SLO illustrated above states that the "cpu_usage" monitoring metric should ideally stay below 80 (percent), otherwise an SLO violation should be triggered. The complex SLO involves three monitoring metrics, "cpu_usage", "ram_usage" and "swap_usage", which should stay below 80, 70 and 50 (percent) respectively. The format illustrated in the second example has been devised to allow nested AND-based or OR-based SLOs to be defined. The complex SLO rule states that if (cpu_usage > 80 AND (ram_usage > 70 OR swap_usage > 50)) then an SLO violation should be triggered.

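To make the nested semantics concrete, the following is a hypothetical sketch (not the project's actual `SLORule` implementation) of how such a constraint tree could be evaluated recursively against a map of current metric values, using the json-simple library that the project already depends on. All class and method names in this sketch are illustrative only.

```java
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;

import java.util.Map;

// Illustrative helper: returns true when the (sub)rule is violated.
public class ConstraintSketch {
    public static boolean violated(JSONObject rule, Map<String, Double> metricValues) {
        JSONArray constraints = (JSONArray) rule.get("constraints");
        if (constraints == null || constraints.isEmpty()) {
            // Leaf constraint: <metric> <operator> <threshold>; assumes the metric value is present.
            double value = metricValues.get((String) rule.get("metric"));
            double threshold = ((Number) rule.get("threshold")).doubleValue();
            switch ((String) rule.get("operator")) {
                case ">":  return value > threshold;
                case ">=": return value >= threshold;
                case "<":  return value < threshold;
                default:   return value <= threshold;
            }
        }
        // Composite constraint: combine children with AND or OR.
        boolean and = "AND".equals(rule.get("operator"));
        boolean result = and; // identity element: true for AND, false for OR
        for (Object o : constraints) {
            boolean child = violated((JSONObject) o, metricValues);
            result = and ? (result && child) : (result || child);
        }
        return result;
    }
}
```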
### Building & Running the component

The component can be built using Maven (`mvn clean install -Dtest=!UnboundedMonitoringAttributeTests`). This command should succeed without errors, verifying that all tests (except for the unbounded monitoring attribute tests) are executed successfully. Then, any of the produced jar files (either the shaded or the non-shaded version) can be run using the following command:

`java -jar <jar_name> <configuration_file_location>`

When the component starts correctly it will not display any error logs, and it may also display that it listens for events on the topic in which SLO rules are to be specified (by default **metrics.metric_list**).

When debugging/developing, the component can be started from the Java main method which is located inside the src/runtime/Main.java file.

### Testing process

To test the functionality of the component, provided that a working ActiveMQ broker / Event Management System (EMS) installation is available, the following steps should be followed:

1. Send a message with the rule to be monitored (in production, the EMS translator is responsible for sending this message, as well as the messages described in step 2; the messages described in step 3 are sent by the Prediction Orchestrator).

2. Create a monitoring metrics stream, sending to a monitoring topic some values resembling real monitoring values (an illustrative publishing sketch appears at the end of this section).

3. Create a predicted metrics stream, sending to a prediction topic values which resemble real monitoring values. If the predicted values are over the thresholds defined at step 1, then an SLO event should be created. It is important to create predicted monitoring data for the same monitoring attribute as the one for which realtime monitoring data is generated.

4. Watch for output on the defined output topic (by default `prediction.slo_severity_value`). The output will have the JSON format illustrated below:

```json
{
  "severity": 0.9064,
  "predictionTime": 1626181860,
  "probability": 0.92246521
}
```

To illustrate, in the case that an SLO message identical to the simple SLO example is sent at step 1, monitoring messages should be sent in step 2 to the `cpu_usage` topic and predicted monitoring messages should be sent in step 3 to the `prediction.cpu_usage` topic. Finally, SLO violations will be announced on the `prediction.slo_severity_value` topic.

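For step 2, a test monitoring value can be published with the same broker client library that the detector itself uses (`eu.melodic.event.brokerclient.BrokerPublisher`, a dependency of this project, with the topic name passed as the first constructor argument, as in `runtime/Main.java`). The sketch below is illustrative only: the broker credentials are placeholders, and apart from `metricValue` (the field the detector actually reads) the message fields are assumptions.

```java
import eu.melodic.event.brokerclient.BrokerPublisher;
import org.json.simple.JSONObject;

public class PublishTestMetric {
    public static void main(String[] args) {
        // Assumed settings for a local ActiveMQ/EMS installation.
        String brokerUrl = "tcp://localhost:61616?wireFormat.maxInactivityDuration=0";
        String amqLibraryConfig = "src/main/resources/config/eu.melodic.event.brokerclient.properties";

        // Publish a realtime value for the cpu_usage metric; the monitoring topic name matches the metric name.
        BrokerPublisher publisher = new BrokerPublisher("cpu_usage", brokerUrl, "User1", "userpassword", amqLibraryConfig);

        JSONObject message = new JSONObject();
        message.put("metricValue", 92.0);                             // the field the detector parses
        message.put("timestamp", System.currentTimeMillis() / 1000L); // assumed field; epoch seconds
        publisher.publish(message.toJSONString());
    }
}
```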
### Docker container build

To run the component in Dockerized form, it is sufficient to build the Dockerfile which is included at the root of the project. When running the Docker container, the configuration file which will be used is the `src/main/resources/config/eu.morphemic.slo_violation_detector.properties` file, relative to the root of the project (this location is specified as a variable in the `configuration.Constants` class). If another configuration file needs to be used, it should be mounted over the `/home/src/main/resources/config/eu.morphemic.slo_violation_detector.properties` location.

To start the component, docker run can be used:

`docker run <container_name>`
slo-violation-detector/pom.xml (new file, 158 lines)
@@ -0,0 +1,158 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>gr.ntua.imu.nebulous</groupId>
    <artifactId>SLO-Violation-Detector</artifactId>
    <version>4.0-SNAPSHOT</version>
    <build>
        <plugins>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>3.2.4</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <transformers>
                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                    <mainClass>runtime.Main</mainClass>
                                </transformer>
                            </transformers>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>11</source>
                    <target>11</target>
                </configuration>
                <version>3.8.1</version>
            </plugin>
        </plugins>

        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-surefire-plugin</artifactId>
                    <version>3.0.0-M5</version>
                </plugin>
                <plugin>
                    <artifactId>maven-deploy-plugin</artifactId>
                    <version>2.8.1</version>
                    <executions>
                        <execution>
                            <id>default-deploy</id>
                            <phase>deploy</phase>
                            <goals>
                                <goal>deploy</goal>
                            </goals>
                        </execution>
                    </executions>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>

    <dependencies>
        <dependency>
            <groupId>gr.ntua.imu.morphemic</groupId>
            <artifactId>amq-message-java-library</artifactId>
            <version>4.0.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.projectlombok/lombok -->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.30</version>
            <scope>provided</scope>
        </dependency>

        <!-- https://mvnrepository.com/artifact/javax.jms/javax.jms-api -->
        <dependency>
            <groupId>javax.jms</groupId>
            <artifactId>javax.jms-api</artifactId>
            <version>2.0.1</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.activemq/activemq-broker -->
        <dependency>
            <groupId>org.apache.activemq</groupId>
            <artifactId>activemq-broker</artifactId>
            <version>5.16.1</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-api</artifactId>
            <version>1.8.0-beta4</version>
        </dependency>
        <dependency>
            <groupId>org.springframework</groupId>
            <artifactId>spring-context</artifactId>
            <version>5.3.3</version>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-autoconfigure</artifactId>
            <version>2.4.3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-collections4</artifactId>
            <version>4.2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.commons</groupId>
            <artifactId>commons-lang3</artifactId>
            <version>3.8.1</version>
            <scope>compile</scope>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.13.1</version>
            <scope>test</scope>
        </dependency>

    </dependencies>

    <repositories>
        <repository>
            <id>maven-central</id>
            <url>https://repo1.maven.org/maven2/</url>
        </repository>
        <repository>
            <id>gitlab-maven-morphemic-preprocessor</id>
            <url>https://gitlab.ow2.org/api/v4/projects/1370/packages/maven</url>
        </repository>
    </repositories>

    <distributionManagement>
        <snapshotRepository>
            <id>eu.7bulls</id>
            <name>Melodic 7bulls repository</name>
            <url>https://nexus.7bulls.eu:8443/repository/maven-snapshots/</url>
        </snapshotRepository>
        <repository>
            <id>eu.7bulls</id>
            <name>Melodic 7bulls repository</name>
            <url>https://nexus.7bulls.eu:8443/repository/maven-releases/</url>
        </repository>
    </distributionManagement>
</project>
@@ -0,0 +1,54 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package configuration;

import java.net.URI;
import java.util.logging.Level;

public class Constants {

    //String constants
    public static String EMPTY = "";
    public static String SPACE = " ";
    public static Double LOWER_LIMIT_DELTA = - 100.0;
    public static Double UPPER_LIMIT_DELTA = - 100.0;


    //Operational constants
    public static boolean USE_CONFIGURATION_FILE_FOR_METRIC_VALUES_INPUT = false;
    public static boolean USE_CONFIGURATION_FILE_FOR_METRIC_TOPICS_INPUT = false;
    public static int elements_considered_in_prediction = 10;
    public static String slo_violation_determination_method;
    public static int time_horizon_seconds;
    public static int maximum_acceptable_forward_predictions;

    public static double confidence_interval;
    public static double prediction_certainty;

    public static String [] logic_operators = {"and","or"};
    public static URI base_project_path;
    public static String configuration_file_location = "src/main/resources/config/input_data.properties";
    public static String amq_library_configuration_location = "src/main/resources/config/eu.melodic.event.brokerclient.properties";
    public static String topic_for_severity_announcement = "prediction.slo_severity_value";
    public static double slo_violation_probability_threshold = 0.5; //The threshold over which the probability of a predicted slo violation should be to have a violation detection
    public static int kept_values_per_metric = 5; //Default to be overriden from the configuration file. This indicates how many metric values are kept to calculate the "previous" metric value during the rate of change calculation
    public static String roc_calculation_mode = "prototype";
    public static boolean self_publish_rule_file = false; //default value to be overriden
    public static boolean single_slo_rule_active = true; //default value to be overriden
    public static boolean first_run = true;
    public static double roc_limit = 1;
    public static double epsilon = 0.00000000001;
    public static Level debug_logging_level = Level.OFF;
    public static Level info_logging_level = Level.INFO; //Default to be overriden from the configuration file
    public static Level warning_logging_level = Level.WARNING;//Default to be overriden from the configuration file
    public static Level severe_logging_level = Level.SEVERE;

    //Formatting constants
    public static String dashed_line = "\n----------------------\n";
}
@@ -0,0 +1,201 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package metric_retrieval;

import eu.melodic.event.brokerclient.BrokerSubscriber;
import eu.melodic.event.brokerclient.templates.EventFields;
import eu.melodic.event.brokerclient.templates.TopicNames;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import runtime.Main;
import slo_processing.SLORule;
import slo_processing.SLOSubRule;
import utility_beans.PredictedMonitoringAttribute;
import utility_beans.RealtimeMonitoringAttribute;

import java.time.Clock;
import java.util.HashMap;
import java.util.function.BiFunction;
import java.util.logging.Logger;

import static configuration.Constants.*;
import static runtime.Main.*;
import static utility_beans.PredictedMonitoringAttribute.getPredicted_monitoring_attributes;
import static utility_beans.RealtimeMonitoringAttribute.update_monitoring_attribute_value;

public class AttributeSubscription {
    SLORule slo_rule;
    private Thread realtime_subscription_thread, forecasted_subscription_thread;


    public AttributeSubscription(SLORule slo_rule, String broker_ip_address, String broker_username, String broker_password){
        this.slo_rule = slo_rule;
        for (String metric:slo_rule.get_monitoring_attributes()){

            String realtime_metric_topic_name = TopicNames.realtime_metric_values_topic(metric);
            Logger.getAnonymousLogger().log(info_logging_level,"Starting realtime subscription at "+realtime_metric_topic_name);
            BrokerSubscriber subscriber = new BrokerSubscriber(realtime_metric_topic_name, broker_ip_address,broker_username,broker_password, amq_library_configuration_location);
            BiFunction<String,String,String> function = (topic, message) ->{
                synchronized (RealtimeMonitoringAttribute.getMonitoring_attributes().get(topic)) {
                    try {
                        update_monitoring_attribute_value(topic,((Number)((JSONObject)new JSONParser().parse(message)).get("metricValue")).doubleValue());

                        Logger.getAnonymousLogger().log(info_logging_level,"RECEIVED message with value for "+topic+" equal to "+(((JSONObject)new JSONParser().parse(message)).get("metricValue")));
                    } catch (ParseException e) {
                        e.printStackTrace();
                        Logger.getAnonymousLogger().log(info_logging_level,"A parsing exception was caught while parsing message: "+message);
                    } catch (Exception e){
                        e.printStackTrace();
                        Logger.getAnonymousLogger().log(info_logging_level,"An unknown exception was caught while parsing message: "+message);
                    }
                }
                return message;
            };
            realtime_subscription_thread = new Thread(() -> {
                try {
                    subscriber.subscribe(function, Main.stop_signal);
                    if(Thread.interrupted()){
                        throw new InterruptedException();
                    }
                }catch (Exception i){
                    Logger.getAnonymousLogger().log(info_logging_level,"Possible interruption of realtime subscriber thread for "+realtime_metric_topic_name+" - if not stacktrace follows");
                    if (! (i instanceof InterruptedException)){
                        i.printStackTrace();
                    }
                }finally{
                    Logger.getAnonymousLogger().log(info_logging_level,"Removing realtime subscriber thread for "+realtime_metric_topic_name);
                    running_threads.remove("realtime_subscriber_thread_" + realtime_metric_topic_name);
                }
            });
            running_threads.put("realtime_subscriber_thread_"+realtime_metric_topic_name,realtime_subscription_thread);
            realtime_subscription_thread.start();


            String forecasted_metric_topic_name = TopicNames.final_metric_predictions_topic(metric);
            Logger.getAnonymousLogger().log(info_logging_level,"Starting forecasted metric subscription at "+forecasted_metric_topic_name);
            BrokerSubscriber forecasted_subscriber = new BrokerSubscriber(forecasted_metric_topic_name, broker_ip_address,broker_username,broker_password, amq_library_configuration_location);

            BiFunction<String,String,String> forecasted_function = (topic,message) ->{
                String predicted_attribute_name = topic.replaceFirst("prediction\\.",EMPTY);
                HashMap<Integer, HashMap<Long,PredictedMonitoringAttribute>> predicted_attributes = getPredicted_monitoring_attributes();
                try {
                    double forecasted_value = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.metric_value)).doubleValue();
                    double probability_confidence = 100*((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.probability)).doubleValue();
                    JSONArray json_array_confidence_interval = ((JSONArray)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.confidence_interval));

                    double confidence_interval;
                    try{
                        confidence_interval = ((Number) json_array_confidence_interval.get(1)).doubleValue() - ((Number) json_array_confidence_interval.get(0)).doubleValue();
                    }catch (ClassCastException | NumberFormatException c){
                        Logger.getAnonymousLogger().log(info_logging_level,"Catching exception successfully");
                        c.printStackTrace();
                        confidence_interval = Double.NEGATIVE_INFINITY;
                    }
                    long timestamp = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.timestamp)).longValue();
                    long targeted_prediction_time = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.prediction_time)).longValue();
                    Logger.getAnonymousLogger().log(info_logging_level,"RECEIVED message with predicted value for "+predicted_attribute_name+" equal to "+ forecasted_value);


                    synchronized (ADAPTATION_TIMES_MODIFY) {
                        while (!ADAPTATION_TIMES_MODIFY.getValue()){
                            try {
                                ADAPTATION_TIMES_MODIFY.wait();
                            } catch (InterruptedException e) {
                                Logger.getAnonymousLogger().log(warning_logging_level,"Interrupted while waiting to access the lock for adaptation times object");
                                e.printStackTrace();
                            }
                        }
                        ADAPTATION_TIMES_MODIFY.setValue(false);
                        if (!adaptation_times.contains(targeted_prediction_time) && (!adaptation_times_pending_processing.contains(targeted_prediction_time)) && ((targeted_prediction_time * 1000 - time_horizon_seconds * 1000L) > (Clock.systemUTC()).millis())) {
                            Logger.getAnonymousLogger().log(info_logging_level, "Adding a new targeted prediction time " + targeted_prediction_time + " expiring in "+(targeted_prediction_time*1000-System.currentTimeMillis())+" from topic "+topic);
                            adaptation_times.add(targeted_prediction_time);
                            synchronized (PREDICTION_EXISTS) {
                                PREDICTION_EXISTS.setValue(true);
                                PREDICTION_EXISTS.notifyAll();
                            }
                        }else {
                            if (adaptation_times.contains(targeted_prediction_time)) {
                                Logger.getAnonymousLogger().log(info_logging_level, "Could not add the new targeted prediction time " + targeted_prediction_time + " from topic " + topic + " as it is already present");
                            } else if (!adaptation_times_pending_processing.contains(targeted_prediction_time)) {
                                if (targeted_prediction_time * 1000 - time_horizon_seconds * 1000L - (Clock.systemUTC()).millis() <= 0) {
                                    Logger.getAnonymousLogger().log(info_logging_level, "Could not add the new targeted prediction time " + targeted_prediction_time + " from topic " + topic + " as it would expire in " + (targeted_prediction_time * 1000 - System.currentTimeMillis()) + " milliseconds and the prediction horizon is " + time_horizon_seconds * 1000L + " milliseconds");
                                }else{
                                    Logger.getAnonymousLogger().log(info_logging_level,"Adding new prediction time "+targeted_prediction_time+" which expires in " + (targeted_prediction_time * 1000 - System.currentTimeMillis()));
                                    adaptation_times_pending_processing.add(targeted_prediction_time);
                                }
                            }
                        }
                        ADAPTATION_TIMES_MODIFY.setValue(true);
                        ADAPTATION_TIMES_MODIFY.notifyAll();
                    }
                    synchronized (Main.can_modify_slo_rules) {
                        while(!Main.can_modify_slo_rules.getValue()) {
                            Main.can_modify_slo_rules.wait();
                        }
                        Main.can_modify_slo_rules.setValue(false);
                        for (SLOSubRule subrule : SLOSubRule.getSlo_subrules_per_monitoring_attribute().get(predicted_attribute_name)) { //Get the subrules which are associated to the monitoring attribute which is predicted, and perform the following processing to each one of them

                            getPredicted_monitoring_attributes().computeIfAbsent(subrule.getId(), k -> new HashMap<>());
                            //if ( (getPredicted_monitoring_attributes().get(subrule.getId()).get(targeted_prediction_time)!=null) &&(getPredicted_monitoring_attributes().get(subrule.getId()).get(targeted_prediction_time).getTimestamp()>timestamp)){
                            if (last_processed_adaptation_time>=targeted_prediction_time){
                                //Do nothing, as in this case the targeted prediction time of the 'new' prediction is older than or equal to the last processed adaptation timepoint. This means that this prediction has arrived delayed, and so it should be disregarded
                            }else {
                                PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute(predicted_attribute_name, subrule.getThreshold(), subrule.getId(), forecasted_value, probability_confidence, confidence_interval,timestamp, targeted_prediction_time);

                                subrule.setAssociated_predicted_monitoring_attribute(prediction_attribute);

                                getPredicted_monitoring_attributes().get(subrule.getId()).put(targeted_prediction_time, prediction_attribute);
                            }
                        }
                        can_modify_slo_rules.setValue(true);
                        can_modify_slo_rules.notifyAll();
                    }
                    //SLOViolationCalculator.get_Severity_all_metrics_method(prediction_attribute)

                } catch (ParseException p){
                    p.printStackTrace();
                } catch (InterruptedException e) {
                    Logger.getAnonymousLogger().log(info_logging_level,"Monitoring attribute subscription thread for prediction attribute "+predicted_attribute_name+" is stopped");
                } catch (ClassCastException | NumberFormatException n){
                    Logger.getAnonymousLogger().log(info_logging_level,"Error while trying to parse message\n"+message);
                } catch (Exception e){
                    Logger.getAnonymousLogger().log(info_logging_level,"An unknown exception was caught\n"+message);
                }
                return message;
            };


            forecasted_subscription_thread = new Thread(() -> {
                try {
                    synchronized (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(forecasted_metric_topic_name)) {
                        //if (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(forecasted_metric_topic_name).getValue())
                        forecasted_subscriber.subscribe(forecasted_function,Main.stop_signal);
                    }
                    if (Thread.interrupted()) {
                        throw new InterruptedException();
                    }
                }catch (Exception i){
                    Logger.getAnonymousLogger().log(info_logging_level,"Possible interruption of forecasting subscriber thread for "+forecasted_metric_topic_name+" - if not stacktrace follows");
                    if (! (i instanceof InterruptedException)){
                        i.printStackTrace();
                    }
                }finally {
                    Logger.getAnonymousLogger().log(info_logging_level,"Removing forecasting subscriber thread for "+forecasted_metric_topic_name);
                    running_threads.remove("forecasting_subscriber_thread_"+forecasted_metric_topic_name);
                }
            });
            running_threads.put("forecasting_subscriber_thread_"+forecasted_metric_topic_name,forecasted_subscription_thread);
            forecasted_subscription_thread.start();

        }
    }
}
718
slo-violation-detector/src/main/java/runtime/Main.java
Normal file
718
slo-violation-detector/src/main/java/runtime/Main.java
Normal file
@ -0,0 +1,718 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package runtime;
|
||||||
|
|
||||||
|
import eu.melodic.event.brokerclient.BrokerPublisher;
|
||||||
|
import eu.melodic.event.brokerclient.BrokerSubscriber;
|
||||||
|
import metric_retrieval.AttributeSubscription;
|
||||||
|
import org.apache.commons.collections4.queue.CircularFifoQueue;
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import slo_processing.SLORule;
|
||||||
|
import slo_processing.SLOSubRule;
|
||||||
|
import utilities.DebugDataSubscription;
|
||||||
|
import utilities.MathUtils;
|
||||||
|
import utility_beans.*;
|
||||||
|
|
||||||
|
import java.io.File;
|
||||||
|
import java.io.FileInputStream;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.net.URI;
|
||||||
|
import java.nio.file.Files;
|
||||||
|
import java.nio.file.Paths;
|
||||||
|
import java.sql.Timestamp;
|
||||||
|
import java.time.Clock;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
import java.util.function.BiFunction;
|
||||||
|
import java.util.logging.Level;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import static configuration.Constants.*;
|
||||||
|
import static java.lang.Thread.sleep;
|
||||||
|
import static slo_processing.SLORule.process_rule_value;
|
||||||
|
import static utility_beans.PredictedMonitoringAttribute.getPredicted_monitoring_attributes;
|
||||||
|
import static utility_beans.RealtimeMonitoringAttribute.getMonitoring_attributes_bounds_representation;
|
||||||
|
|
||||||
|
public class Main {
|
||||||
|
public static final AtomicBoolean stop_signal = new AtomicBoolean(false);
|
||||||
|
public static final SynchronizedBoolean PREDICTION_EXISTS = new SynchronizedBoolean(false);
|
||||||
|
public static final SynchronizedBoolean ADAPTATION_TIMES_MODIFY = new SynchronizedBoolean(true);
|
||||||
|
public static SynchronizedBooleanMap HAS_MESSAGE_ARRIVED = new SynchronizedBooleanMap();
|
||||||
|
public static SynchronizedStringMap MESSAGE_CONTENTS = new SynchronizedStringMap();
|
||||||
|
public static ArrayList<SLORule> slo_rules = new ArrayList<>();
|
||||||
|
public static HashMap<String,Thread> running_threads = new HashMap<>();
|
||||||
|
public static HashSet<Long> adaptation_times = new HashSet<>();
|
||||||
|
public static HashSet<Long> adaptation_times_pending_processing = new HashSet<>();
|
||||||
|
private static HashSet<Long> adaptation_times_to_remove = new HashSet<>();
|
||||||
|
public static Long last_processed_adaptation_time = -1L;//initialization
|
||||||
|
public static Long current_slo_rules_version = -1L;//initialization
|
||||||
|
public static final AtomicBoolean slo_rule_arrived = new AtomicBoolean(false);
|
||||||
|
public static final SynchronizedBoolean can_modify_slo_rules = new SynchronizedBoolean(false);
|
||||||
|
|
||||||
|
//Debugging variables
|
||||||
|
public static CircularFifoQueue<Long> slo_violation_event_recording_queue = new CircularFifoQueue<>(50);
|
||||||
|
public static CircularFifoQueue<String> severity_calculation_event_recording_queue = new CircularFifoQueue<>(50);
|
||||||
|
private static Properties prop = new Properties();
|
||||||
|
|
||||||
|
public static void main(String[] args) {
|
||||||
|
|
||||||
|
//The input of this program is the type of the SLO violations which are monitored, and the predicted metric values which are evaluated. Its output are events which produce an estimate of the probability of an adaptation.
|
||||||
|
//The SLO violations which are monitored need to mention the following data:
|
||||||
|
// - The name of the predicted metrics which are monitored as part of the rule
|
||||||
|
// - The threshold and whether it is a more-than or less-than threshold
|
||||||
|
//The predicted metrics need to include the following data:
|
||||||
|
// - The predicted value
|
||||||
|
// - The prediction confidence
|
||||||
|
|
||||||
|
try {
|
||||||
|
InputStream inputStream = null;
|
||||||
|
if (args.length == 0) {
|
||||||
|
base_project_path = new File("").toURI();
|
||||||
|
URI absolute_configuration_file_path = new File(configuration_file_location).toURI();
|
||||||
|
URI relative_configuration_file_path = base_project_path.relativize(absolute_configuration_file_path);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"This is the base project path:"+base_project_path);
|
||||||
|
inputStream = new FileInputStream(base_project_path.getPath()+relative_configuration_file_path);
|
||||||
|
} else {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "A preferences file has been manually specified");
|
||||||
|
|
||||||
|
if (base_project_path == null || base_project_path.getPath().equals(EMPTY)) {
|
||||||
|
base_project_path = new File(args[0]).toURI();
|
||||||
|
}
|
||||||
|
inputStream = new FileInputStream(base_project_path.getPath());
|
||||||
|
}
|
||||||
|
prop.load(inputStream);
|
||||||
|
String slo_rules_topic = prop.getProperty("slo_rules_topic");
|
||||||
|
kept_values_per_metric = Integer.parseInt(prop.getProperty("stored_values_per_metric","5"));
|
||||||
|
self_publish_rule_file = Boolean.parseBoolean(prop.getProperty("self_publish_rule_file"));
|
||||||
|
single_slo_rule_active = Boolean.parseBoolean(prop.getProperty("single_slo_rule_active"));
|
||||||
|
time_horizon_seconds = Integer.parseInt(prop.getProperty("time_horizon_seconds"));
|
||||||
|
|
||||||
|
slo_violation_probability_threshold = Double.parseDouble(prop.getProperty("slo_violation_probability_threshold"));
|
||||||
|
slo_violation_determination_method = prop.getProperty("slo_violation_determination_method");
|
||||||
|
maximum_acceptable_forward_predictions = Integer.parseInt(prop.getProperty("maximum_acceptable_forward_predictions"));
|
||||||
|
ArrayList<String> unbounded_metric_strings = new ArrayList<String>(Arrays.asList(prop.getProperty("metrics_bounds").split(",")));
|
||||||
|
for (String metric_string : unbounded_metric_strings){
|
||||||
|
getMonitoring_attributes_bounds_representation().put(metric_string.split(";")[0], metric_string.split(";",2)[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
while (true) {
|
||||||
|
|
||||||
|
if (first_run && USE_CONFIGURATION_FILE_FOR_METRIC_VALUES_INPUT) {
|
||||||
|
|
||||||
|
String json_file_name = prop.getProperty("input_file");
|
||||||
|
slo_violation_determination_method = prop.getProperty("slo_violation_determination_method");
|
||||||
|
confidence_interval = Double.parseDouble(prop.getProperty("confidence_interval"));
|
||||||
|
prediction_certainty = Double.parseDouble(prop.getProperty("prediction_certainty"));
|
||||||
|
|
||||||
|
ArrayList<String> metric_names = new ArrayList<>() {{
|
||||||
|
add("cpu");
|
||||||
|
add("ram");
|
||||||
|
add("bandwidth");
|
||||||
|
add("disk");
|
||||||
|
}};
|
||||||
|
HashMap<String, Double> input_data = new HashMap<>();
|
||||||
|
for (String metric_name : metric_names) {
|
||||||
|
|
||||||
|
Double metric_input_data = MathUtils.get_average(new ArrayList<>(Arrays.stream(prop.getProperty(metric_name).split(",")).map(Double::parseDouble).collect(Collectors.toList())));
|
||||||
|
|
||||||
|
input_data.put(metric_name, metric_input_data);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
|
||||||
|
RealtimeMonitoringAttribute.simple_initialize_0_100_bounded_attributes(metric_names);
|
||||||
|
|
||||||
|
RealtimeMonitoringAttribute.update_monitoring_attributes_values_map(input_data);
|
||||||
|
|
||||||
|
//Parsing of file
|
||||||
|
String rules_json_string = String.join(EMPTY, Files.readAllLines(Paths.get(new File(json_file_name).getAbsolutePath())));
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, rules_json_string);
|
||||||
|
Main.MESSAGE_CONTENTS.assign_value(slo_rules_topic, rules_json_string);
|
||||||
|
slo_rules.add(new SLORule(Main.MESSAGE_CONTENTS.get_synchronized_contents(slo_rules_topic), new ArrayList<>(Arrays.asList(prop.getProperty("metrics_list").split(",")))));
|
||||||
|
|
||||||
|
} else {
|
||||||
|
if (first_run && USE_CONFIGURATION_FILE_FOR_METRIC_TOPICS_INPUT) {
|
||||||
|
synchronized (can_modify_slo_rules) {
|
||||||
|
//do {
|
||||||
|
// can_modify_slo_rules.wait();
|
||||||
|
//}while(!can_modify_slo_rules.getValue());
|
||||||
|
can_modify_slo_rules.setValue(false);
|
||||||
|
|
||||||
|
slo_rules.add(new SLORule(Main.MESSAGE_CONTENTS.get_synchronized_contents(slo_rules_topic), new ArrayList<>(Arrays.asList(prop.getProperty("metrics_list").split(",")))));
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
slo_rule_arrived.set(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
}
|
||||||
|
} else if (first_run){
|
||||||
|
|
||||||
|
BiFunction<String, String, String> function = (topic, message) -> {
|
||||||
|
synchronized (can_modify_slo_rules) {
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
Main.MESSAGE_CONTENTS.assign_value(topic, message);
|
||||||
|
slo_rule_arrived.set(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "BrokerClientApp: - Received text message: " + message + " at topic " + topic);
|
||||||
|
|
||||||
|
}
|
||||||
|
return topic + ":MSG:" + message;
|
||||||
|
};
|
||||||
|
|
||||||
|
BrokerSubscriber subscriber = new BrokerSubscriber(slo_rules_topic, prop.getProperty("broker_ip_url"), prop.getProperty("broker_username"), prop.getProperty("broker_password"), amq_library_configuration_location);
|
||||||
|
new Thread(() -> {
|
||||||
|
while (true) {
|
||||||
|
subscriber.subscribe(function, new AtomicBoolean(false)); //This subscriber should be immune to stop signals
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Broker unavailable, will try to reconnect after 10 seconds");
|
||||||
|
try {
|
||||||
|
Thread.sleep(10000);
|
||||||
|
}catch (InterruptedException i){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Sleep was interrupted, will immediately try to connect to the broker");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}).start();
|
||||||
|
|
||||||
|
if (self_publish_rule_file) {
|
||||||
|
String json_file_name = prop.getProperty("input_file");
|
||||||
|
String rules_json_string = String.join(EMPTY, Files.readAllLines(Paths.get(new File(json_file_name).getAbsolutePath())));
|
||||||
|
BrokerPublisher publisher = new BrokerPublisher(slo_rules_topic, prop.getProperty("broker_ip_url"), prop.getProperty("broker_username"), prop.getProperty("broker_password"), amq_library_configuration_location);
|
||||||
|
publisher.publish(rules_json_string);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Sent message\n" + rules_json_string);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
first_run = false;
|
||||||
|
synchronized (can_modify_slo_rules) {
|
||||||
|
do {
|
||||||
|
try {
|
||||||
|
can_modify_slo_rules.wait();
|
||||||
|
}catch (InterruptedException i){
|
||||||
|
i.printStackTrace();
|
||||||
|
}
|
||||||
|
}while((!can_modify_slo_rules.getValue()) || (!slo_rule_arrived.get()));
|
||||||
|
can_modify_slo_rules.setValue(false);
|
||||||
|
slo_rule_arrived.set(false);
|
||||||
|
String rule_representation = MESSAGE_CONTENTS.get_synchronized_contents(slo_rules_topic);
|
||||||
|
if (slo_rule_arrived_has_updated_version(rule_representation)) {
|
||||||
|
if (single_slo_rule_active) {
|
||||||
|
slo_rules.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
ArrayList<String> additional_metrics_from_new_slo = get_metric_list_from_JSON_slo(rule_representation);
|
||||||
|
|
||||||
|
if (additional_metrics_from_new_slo.size() > 0) {
|
||||||
|
slo_rules.add(new SLORule(rule_representation, additional_metrics_from_new_slo));
|
||||||
|
}
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
}else{
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
stop_all_running_threads();
|
||||||
|
DebugDataSubscription.initiate(prop.getProperty("broker_ip_url"),prop.getProperty("broker_username"), prop.getProperty("broker_password"));
|
||||||
|
initialize_monitoring_datastructures_with_empty_data(slo_rules);
|
||||||
|
//
|
||||||
|
initialize_subrule_and_attribute_associations(slo_rules);
|
||||||
|
initialize_attribute_subscribers(slo_rules, prop.getProperty("broker_ip_url"), prop.getProperty("broker_username"), prop.getProperty("broker_password"));
|
||||||
|
initialize_slo_processing(slo_rules);
|
||||||
|
|
||||||
|
//while (true) {
|
||||||
|
|
||||||
|
//}
|
||||||
|
}
|
||||||
|
}catch (Exception e){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Problem reading input file");
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean slo_rule_arrived_has_updated_version(String rule_representation) {
|
||||||
|
JSONObject json_object = null;
|
||||||
|
long json_object_version = Integer.MAX_VALUE;
|
||||||
|
try {
|
||||||
|
json_object = (JSONObject) new JSONParser().parse(rule_representation);
|
||||||
|
json_object_version = (Long) json_object.get("version");
|
||||||
|
} catch (NullPointerException n){
|
||||||
|
n.printStackTrace();
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Unfortunately a null message was sent to the SLO Violation Detector, which is being ignored");
|
||||||
|
return false;
|
||||||
|
} catch (Exception e){
|
||||||
|
e.printStackTrace();
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Could not parse the JSON of the new SLO, assuming it is not an updated rule...");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (json_object_version > current_slo_rules_version){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"An SLO with updated version ("+json_object_version+" vs older "+current_slo_rules_version+") has arrived");
|
||||||
|
current_slo_rules_version=json_object_version;
|
||||||
|
return true;
|
||||||
|
}else {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Taking no action for the received SLO message as the version number is not updated");
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void stop_all_running_threads() {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Asking previously existing threads to terminate");
|
||||||
|
int initial_number_of_running_threads = running_threads.size();
|
||||||
|
while (running_threads.size()>0) {
|
||||||
|
synchronized (stop_signal) {
|
||||||
|
stop_signal.set(true);
|
||||||
|
stop_signal.notifyAll();
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
Thread.sleep(3000);
|
||||||
|
running_threads.values().forEach(Thread::interrupt);
|
||||||
|
}catch (Exception e){
|
||||||
|
}
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Stopped "+(initial_number_of_running_threads-running_threads.size())+"/"+initial_number_of_running_threads+" already running threads");
|
||||||
|
if (running_threads.size()>1){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The threads which are still running are the following: "+running_threads);
|
||||||
|
}else if (running_threads.size()>0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The thread which is still running is the following: "+running_threads);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"All threads have terminated");
|
||||||
|
synchronized (stop_signal) {
|
||||||
|
stop_signal.set(false);
|
||||||
|
}
|
||||||
|
synchronized (PREDICTION_EXISTS){
|
||||||
|
PREDICTION_EXISTS.setValue(false);
|
||||||
|
}
|
||||||
|
adaptation_times.clear();
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void initialize_subrule_and_attribute_associations(ArrayList<SLORule> slo_rules) {
|
||||||
|
synchronized (can_modify_slo_rules) {
|
||||||
|
while (!can_modify_slo_rules.getValue()){
|
||||||
|
try {
|
||||||
|
can_modify_slo_rules.wait();
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
can_modify_slo_rules.setValue(false);
|
||||||
|
for (SLORule slo_rule : slo_rules) {
|
||||||
|
for (SLOSubRule subrule : SLORule.parse_subrules(slo_rule.getRule_representation(),slo_rule.getRule_format())) {
|
||||||
|
SLOSubRule.getSlo_subrules_per_monitoring_attribute().computeIfAbsent(subrule.getMetric(), k -> new ArrayList<>());
|
||||||
|
SLOSubRule.getSlo_subrules_per_monitoring_attribute().get(subrule.getMetric()).add(subrule);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static void initialize_monitoring_datastructures_with_empty_data(ArrayList<SLORule> slo_rules){
|
||||||
|
for(SLORule slo_rule: slo_rules){
|
||||||
|
for (String metric_name : slo_rule.get_monitoring_attributes()) {
|
||||||
|
MonitoringAttributeUtilities.initialize_values(metric_name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static ArrayList<String> get_metric_list_from_JSON_slo(String json_object_string) {
|
||||||
|
HashSet<String> metric_list = new HashSet<>();
|
||||||
|
try {
|
||||||
|
JSONObject json_object = (JSONObject) new JSONParser().parse(json_object_string);
|
||||||
|
String json_object_id = (String) json_object.get("id");
|
||||||
|
String json_object_name = (String) json_object.get("name");
|
||||||
|
//Older format uses id-based fields, newer format uses a non-variable structure
|
||||||
|
//We first check if an event using the older format is sent, and then check if the event is sent using the newer format
|
||||||
|
if (json_object_id!=null) {
|
||||||
|
if (json_object_id.split("-").length > 1) {
|
||||||
|
//String composite_rule_type = json_object_id.split("-")[0];
|
||||||
|
JSONArray internal_json_slos = (JSONArray) json_object.get(json_object_id);
|
||||||
|
for (Object o : internal_json_slos) {
|
||||||
|
JSONObject internal_json_slo = (JSONObject) o;
|
||||||
|
metric_list.addAll(get_metric_list_from_JSON_slo(internal_json_slo.toJSONString()));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
metric_list.add((String) json_object.get("attribute"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//If newer format is used
|
||||||
|
else if (json_object_name!=null){
|
||||||
|
JSONArray internal_json_slos = (JSONArray) json_object.get("constraints");
|
||||||
|
if ((internal_json_slos!=null) && (internal_json_slos.size()>0)){
|
||||||
|
for (Object o : internal_json_slos) {
|
||||||
|
JSONObject internal_json_slo = (JSONObject) o;
|
||||||
|
metric_list.addAll(get_metric_list_from_JSON_slo(internal_json_slo.toJSONString()));
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
metric_list.add((String) json_object.get("metric"));
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
Logger.getAnonymousLogger().log(Level.INFO,"An SLO rule was sent in a format which could not be fully parsed, therefore this rule is ignored. The part of the SLO rule which could not be understood is printed below"+"\n"+json_object_string);
|
||||||
|
}
|
||||||
|
}catch (Exception p){
|
||||||
|
p.printStackTrace();
|
||||||
|
return new ArrayList<String>();
|
||||||
|
}
|
||||||
|
return new ArrayList<String>(metric_list);
|
||||||
|
}
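//The following helper is an illustrative sketch added for documentation purposes and is not part of the original implementation; the two rule strings below are hypothetical examples of the formats which get_metric_list_from_JSON_slo distinguishes (older id-based rules and newer name/constraints-based rules).
private static void example_metric_list_extraction() {
//Older format: a composite rule whose id encodes the logical operator, with its subrules nested under the id
String older_format_slo = "{\"id\":\"and-1\",\"and-1\":[{\"id\":\"1\",\"attribute\":\"cpu\",\"operator\":\">\",\"threshold\":\"80\"}]}";
//Newer format: a named rule with a "constraints" array of simple constraints
String newer_format_slo = "{\"name\":\"cpu_slo\",\"operator\":\"AND\",\"constraints\":[{\"name\":\"c1\",\"metric\":\"cpu\",\"operator\":\">\",\"threshold\":80.0}]}";
//Both invocations are expected to return a list containing the single metric name "cpu"
Logger.getAnonymousLogger().log(info_logging_level,"Metrics of the older-format rule: "+get_metric_list_from_JSON_slo(older_format_slo));
Logger.getAnonymousLogger().log(info_logging_level,"Metrics of the newer-format rule: "+get_metric_list_from_JSON_slo(newer_format_slo));
}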
|
||||||
|
|
||||||
|
private static double get_metric_value_from_JSON(String data_arrived) {
|
||||||
|
double result = -1;
|
||||||
|
|
||||||
|
JSONObject json_data_representation = render_valid_json(data_arrived,"=");
|
||||||
|
//JSONObject json_data_representation = (JSONObject) new JSONParser().parse(data_arrived);
|
||||||
|
result = Double.parseDouble((String)json_data_representation.get("metricValue"));
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This method replaces any invalid characters in the JSON which is received from the broker, and creates a valid JSON object
* @param data_arrived The data which is received from the broker
* @param string_to_replace The invalid character (or character sequence) which should be substituted by a colon
* @return A JSON object containing the key-value pairs of the original message
|
||||||
|
*/
|
||||||
|
private static JSONObject render_valid_json(String data_arrived, String string_to_replace) {
|
||||||
|
String valid_json_string = new String(data_arrived);
|
||||||
|
JSONObject json_object = new JSONObject();
|
||||||
|
valid_json_string = valid_json_string.replaceAll(string_to_replace,":");
|
||||||
|
|
||||||
|
valid_json_string = valid_json_string.replaceAll("[{}]","");
|
||||||
|
|
||||||
|
String [] json_elements = valid_json_string.split(",");
|
||||||
|
List <String> json_elements_list = Arrays.stream(json_elements).map(String::trim).collect(Collectors.toList());
|
||||||
|
|
||||||
|
for (String element: json_elements_list) {
|
||||||
|
json_object.put(element.split(":")[0],element.split(":")[1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return json_object;
|
||||||
|
}
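//The following helper is an illustrative sketch, not part of the original implementation; the sample payload below is hypothetical and only demonstrates the normalization performed by render_valid_json when the broker sends key=value pairs instead of key:value pairs.
private static void example_render_valid_json() {
String sample_payload = "{metricValue=42.5, level=1, timestamp=1690000000}";
JSONObject normalized_payload = render_valid_json(sample_payload,"=");
//The 'equals' signs have been replaced by colons and the braces stripped, so the value can be retrieved normally and is expected to be "42.5"
Logger.getAnonymousLogger().log(info_logging_level,"Parsed metric value: "+normalized_payload.get("metricValue"));
}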
|
||||||
|
|
||||||
|
|
||||||
|
private static void initialize_global_prediction_attribute_data(){
|
||||||
|
Logger.getAnonymousLogger().log(warning_logging_level,"Get global prediction attribute data needs implementation");
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
private static PredictionAttributeSet get_prediction_attribute_set(ArrayList<SLORule> rules){
|
||||||
|
//uses the global HashMap: attribute_data
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static PredictionAttributeSet initialize_with_existing_values(Double cpu_value, Double ram_value) {
|
||||||
|
ArrayList<String> metric_names = new ArrayList<>(){{
|
||||||
|
add("cpu");
|
||||||
|
add("ram");
|
||||||
|
add("hard_disk");
|
||||||
|
}};
|
||||||
|
RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
|
||||||
|
RealtimeMonitoringAttribute.update_monitoring_attribute_value("cpu",cpu_value);
|
||||||
|
RealtimeMonitoringAttribute.update_monitoring_attribute_value("ram",ram_value);
|
||||||
|
|
||||||
|
PredictedMonitoringAttribute cpuPredictionAttribute = new PredictedMonitoringAttribute("cpu", 70,1, 90.0,80,10,System.currentTimeMillis(),System.currentTimeMillis()+10000);
|
||||||
|
PredictedMonitoringAttribute ramPredictionAttribute = new PredictedMonitoringAttribute("ram", 50,2, 70.0,80,10,System.currentTimeMillis(),System.currentTimeMillis()+10000);
|
||||||
|
|
||||||
|
|
||||||
|
PredictionAttributeSet predictionAttributeSet = new PredictionAttributeSet(new ArrayList<>(){{add(cpuPredictionAttribute);add(ramPredictionAttribute);}});
|
||||||
|
|
||||||
|
return predictionAttributeSet;
|
||||||
|
}
|
||||||
|
private static PredictionAttributeSet pseudo_initialize() throws Exception {
|
||||||
|
ArrayList<String> metric_names = new ArrayList<>(){{
|
||||||
|
add("cpu");
|
||||||
|
add("ram");
|
||||||
|
add("hard_disk");
|
||||||
|
}};
|
||||||
|
RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
|
||||||
|
|
||||||
|
//initial cpu values
|
||||||
|
ArrayList<Double> cpu_values = new ArrayList<>();
|
||||||
|
cpu_values.add(10.0);
|
||||||
|
cpu_values.add(20.0);
|
||||||
|
cpu_values.add(30.0);
|
||||||
|
cpu_values.add(40.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(40.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
cpu_values.add(50.0);
|
||||||
|
|
||||||
|
MonitoringAttributeUtilities.initialize_values("cpu",MathUtils.get_average(cpu_values));
|
||||||
|
|
||||||
|
//initial ram values
|
||||||
|
ArrayList<Double> ram_values = new ArrayList<>();
|
||||||
|
ram_values.add(20.0);
|
||||||
|
ram_values.add(20.0);
|
||||||
|
ram_values.add(25.0);
|
||||||
|
ram_values.add(45.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(40.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
ram_values.add(30.0);
|
||||||
|
MonitoringAttributeUtilities.initialize_values("ram",MathUtils.get_average(ram_values));
|
||||||
|
|
||||||
|
//Get prediction_attribute_sets and calculate method 1 on top of them
|
||||||
|
//Get individual prediction_attributes and calculate method 2 on top of them
|
||||||
|
|
||||||
|
PredictedMonitoringAttribute cpuPredictionAttribute = new PredictedMonitoringAttribute("cpu", 70,1, 90.0,80,10,System.currentTimeMillis(),System.currentTimeMillis()+10000);
|
||||||
|
PredictedMonitoringAttribute ramPredictionAttribute = new PredictedMonitoringAttribute("ram", 50,2, 70.0,80,10,System.currentTimeMillis(),System.currentTimeMillis()+10000);
|
||||||
|
|
||||||
|
|
||||||
|
PredictionAttributeSet predictionAttributeSet = new PredictionAttributeSet(new ArrayList<>(){{add(cpuPredictionAttribute);add(ramPredictionAttribute);}});
|
||||||
|
|
||||||
|
return predictionAttributeSet;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static ArrayList<AttributeSubscription> initialize_attribute_subscribers(ArrayList<SLORule> rules_list, String broker_ip_address, String broker_username, String broker_password){
|
||||||
|
ArrayList<AttributeSubscription> attribute_subscribers = new ArrayList<>();
|
||||||
|
for (SLORule rule:rules_list){
|
||||||
|
attribute_subscribers.add(new AttributeSubscription(rule,broker_ip_address,broker_username,broker_password));
|
||||||
|
}
|
||||||
|
return attribute_subscribers;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void initialize_slo_processing(ArrayList<SLORule> rules_list){
|
||||||
|
|
||||||
|
for (SLORule rule:rules_list) {
|
||||||
|
|
||||||
|
Thread severity_calculation_thread = new Thread(() -> {
|
||||||
|
BrokerPublisher persistent_publisher = new BrokerPublisher(topic_for_severity_announcement, prop.getProperty("broker_ip_url"), prop.getProperty("broker_username"), prop.getProperty("broker_password"), amq_library_configuration_location);
|
||||||
|
|
||||||
|
while (!stop_signal.get()) {
|
||||||
|
/*try {
|
||||||
|
Thread.sleep(time_horizon_seconds*1000);
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}*/
|
||||||
|
synchronized (PREDICTION_EXISTS) {
|
||||||
|
while (!PREDICTION_EXISTS.getValue()) {
|
||||||
|
try {
|
||||||
|
PREDICTION_EXISTS.wait();
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
synchronized (stop_signal) {
|
||||||
|
if (stop_signal.get()) {
|
||||||
|
running_threads.remove("severity_calculation_thread_" + rule.toString());
|
||||||
|
PREDICTION_EXISTS.setValue(false);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
Clock clock = Clock.systemUTC();
|
||||||
|
Long current_time = clock.millis();
|
||||||
|
Long targeted_prediction_time;
|
||||||
|
synchronized (ADAPTATION_TIMES_MODIFY) {
|
||||||
|
while (!ADAPTATION_TIMES_MODIFY.getValue()) {
|
||||||
|
try {
|
||||||
|
ADAPTATION_TIMES_MODIFY.wait();
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
Logger.getAnonymousLogger().log(warning_logging_level, "Interrupted while waiting to access the lock for adaptation times object");
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(false);
|
||||||
|
clean_data(adaptation_times_to_remove);
|
||||||
|
//targeted_prediction_time = adaptation_times.stream().min(Long::compare).get();
|
||||||
|
targeted_prediction_time = get_next_targeted_prediction_time();
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(true);
|
||||||
|
ADAPTATION_TIMES_MODIFY.notifyAll();
|
||||||
|
}
|
||||||
|
if (targeted_prediction_time==null){
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Targeted_prediction_time " + targeted_prediction_time);
|
||||||
|
Thread internal_severity_calculation_thread = new Thread(() -> {
|
||||||
|
try {
|
||||||
|
/*
|
||||||
|
synchronized (ADAPTATION_TIMES_MODIFY) {
|
||||||
|
while (!ADAPTATION_TIMES_MODIFY.getValue()) {
|
||||||
|
ADAPTATION_TIMES_MODIFY.wait();
|
||||||
|
}
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(false);
|
||||||
|
adaptation_times.remove(targeted_prediction_time);//remove from the list of timepoints which should be processed. Later this timepoint will be added to the adaptation_times_to_remove HashSet to remove any data associated with it
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(true);
|
||||||
|
ADAPTATION_TIMES_MODIFY.notifyAll();
|
||||||
|
}
|
||||||
|
//adaptation_times_pending_processing.add(targeted_prediction_time);
|
||||||
|
|
||||||
|
*/
|
||||||
|
synchronized (PREDICTION_EXISTS) {
|
||||||
|
PREDICTION_EXISTS.setValue(adaptation_times.size() > 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
Long sleep_time = targeted_prediction_time * 1000 - time_horizon_seconds * 1000L - current_time;
|
||||||
|
if (sleep_time <= 0) {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Prediction cancelled as targeted prediction time was " + targeted_prediction_time * 1000 + " current time is " + current_time + " and the time_horizon is " + time_horizon_seconds * 1000);
|
||||||
|
return; //The predictions are too near to the targeted reconfiguration time (or are even obsolete)
|
||||||
|
} else if (sleep_time > current_time + maximum_acceptable_forward_predictions * time_horizon_seconds * 1000L) {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Prediction cancelled as targeted prediction time was " + targeted_prediction_time * 1000 + " and the current time is " + current_time + ". The prediction is more than " + maximum_acceptable_forward_predictions + " time_horizon intervals into the future (the time_horizon is " + time_horizon_seconds * 1000 + " milliseconds)");
|
||||||
|
return; //The prediction is too far ahead of the targeted reconfiguration time
|
||||||
|
}
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Sleeping for " + sleep_time + " milliseconds");
|
||||||
|
sleep(sleep_time);
|
||||||
|
double rule_severity = process_rule_value(rule.getRule_representation(), targeted_prediction_time, rule.getRule_format());
|
||||||
|
double slo_violation_probability = determine_slo_violation_probability(rule_severity);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The overall " + slo_violation_determination_method + " severity - calculated from real data - for adaptation time " + targeted_prediction_time + " ( " + (new Date((new Timestamp(targeted_prediction_time * 1000)).getTime())) + " ) is " + rule_severity + " and is calculated " + time_horizon_seconds + " seconds beforehand");
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The probability of an SLO violation is " + ((int) (slo_violation_probability * 100)) + "%" + (slo_violation_probability< slo_violation_probability_threshold ?" so it will not be published":" and it will be published"));
|
||||||
|
|
||||||
|
if (slo_violation_probability>= slo_violation_probability_threshold) {
|
||||||
|
JSONObject severity_json = new JSONObject();
|
||||||
|
severity_json.put("severity", rule_severity);
|
||||||
|
severity_json.put("probability", slo_violation_probability);
|
||||||
|
severity_json.put("predictionTime", targeted_prediction_time);
|
||||||
|
persistent_publisher.publish(severity_json.toJSONString());
|
||||||
|
}
|
||||||
|
|
||||||
|
slo_violation_event_recording_queue.add(System.currentTimeMillis());
|
||||||
|
|
||||||
|
//Synchronization is necessary here: although each removal happens only once per reconfiguration interval, another severity calculation thread might invoke clean_data above, which could otherwise lead to a ConcurrentModificationException
|
||||||
|
synchronized (ADAPTATION_TIMES_MODIFY){
|
||||||
|
while (!ADAPTATION_TIMES_MODIFY.getValue()){
|
||||||
|
ADAPTATION_TIMES_MODIFY.wait();
|
||||||
|
}
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(false);
|
||||||
|
adaptation_times_to_remove.add(targeted_prediction_time); //This line serves a different purpose from the adaptation_times.remove(...) directive above, as the adaptation_times_to_remove HashSet contains timepoints which should be processed to delete their data.
|
||||||
|
adaptation_times_pending_processing.remove(targeted_prediction_time);
|
||||||
|
ADAPTATION_TIMES_MODIFY.setValue(true);
|
||||||
|
ADAPTATION_TIMES_MODIFY.notifyAll();
|
||||||
|
}
|
||||||
|
} catch (InterruptedException i) {
|
||||||
|
Logger.getAnonymousLogger().log(severe_logging_level, "Severity calculation thread for epoch time " + targeted_prediction_time + " interrupted, stopping...");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
internal_severity_calculation_thread.setName("internal_severity_calculation_thread_" + targeted_prediction_time);
|
||||||
|
internal_severity_calculation_thread.start();
|
||||||
|
} catch (NoSuchElementException n) {
|
||||||
|
Logger.getAnonymousLogger().log(warning_logging_level, "Could not calculate severity as a value was missing...");
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
running_threads.remove("severity_calculation_thread_"+rule.toString());
|
||||||
|
});
|
||||||
|
String severity_calculation_thread_name = "severity_calculation_thread_"+rule.toString();
|
||||||
|
severity_calculation_thread.setName(severity_calculation_thread_name);
|
||||||
|
severity_calculation_thread.start();
|
||||||
|
running_threads.put(severity_calculation_thread_name,severity_calculation_thread);
|
||||||
|
}/*
|
||||||
|
while (true){
|
||||||
|
|
||||||
|
}*/
|
||||||
|
|
||||||
|
//return slo_processors;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static Long get_next_targeted_prediction_time() {
|
||||||
|
List<Long> possible_targeted_prediction_times = adaptation_times.stream().sorted().limit(maximum_acceptable_forward_predictions).collect(Collectors.toList());
|
||||||
|
for (int i=0; i<possible_targeted_prediction_times.size(); i++){
|
||||||
|
Long possible_targeted_adaptation_time = possible_targeted_prediction_times.get(i);
|
||||||
|
if (!adaptation_times_pending_processing.contains(possible_targeted_adaptation_time)){
|
||||||
|
adaptation_times.remove(possible_targeted_adaptation_time);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"Removing targeted prediction time "+possible_targeted_adaptation_time+" as it is going to be used");
|
||||||
|
adaptation_times_pending_processing.add(possible_targeted_adaptation_time);
|
||||||
|
return possible_targeted_adaptation_time;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void clean_data(HashSet<Long> adaptation_times_to_remove) {
|
||||||
|
for (Long processed_adaptation_time:adaptation_times_to_remove){
|
||||||
|
if (processed_adaptation_time>last_processed_adaptation_time){
|
||||||
|
last_processed_adaptation_time = processed_adaptation_time;
|
||||||
|
}
|
||||||
|
synchronized (Main.can_modify_slo_rules) {
|
||||||
|
while (!Main.can_modify_slo_rules.getValue()) {
|
||||||
|
try {
|
||||||
|
can_modify_slo_rules.wait();
|
||||||
|
} catch (InterruptedException e) {
|
||||||
|
e.printStackTrace();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
can_modify_slo_rules.setValue(false);
|
||||||
|
for (SLORule slo_rule : slo_rules) {
|
||||||
|
for (SLOSubRule subrule : slo_rule.getSlo_subrules()) {
|
||||||
|
if (getPredicted_monitoring_attributes().containsKey(subrule.getId())) {
|
||||||
|
getPredicted_monitoring_attributes().get(subrule.getId()).remove(processed_adaptation_time);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
can_modify_slo_rules.setValue(true);
|
||||||
|
can_modify_slo_rules.notifyAll();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This function determines the probability of an SLO violation
|
||||||
|
* @param rule_severity The severity of the rule which has been determined
|
||||||
|
* @return The probability of the rule being violated. The minimum value of this probability is 0, and increases as the severity increases
|
||||||
|
*/
|
||||||
|
public static double determine_slo_violation_probability(double rule_severity) {
|
||||||
|
if (slo_violation_determination_method.equals("all-metrics")) {
|
||||||
|
//39.64 is the mean severity value when examining all integer severity values for roc x probability x confidence_interval x delta_value in (-100,100)x(0,100)x(0,100)x(-100,100)
|
||||||
|
/*
|
||||||
|
if (rule_severity >= 40) {
|
||||||
|
return Math.min((50 + 50*(rule_severity - 40) / 60)/100,1); // in case we desire the probability to start from 50%
|
||||||
|
// return Math.min((100*(rule_severity - 40) / 60)/100,1); // in case we desire the probability to start from 0%
|
||||||
|
} else {
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
*/
|
||||||
|
return Math.min(rule_severity/100,1);
|
||||||
|
}else if (slo_violation_determination_method.equals("prconf-delta")){
|
||||||
|
//Logger.getAnonymousLogger().log(warning_logging_level,"The calculation of probability for the prconf-delta method needs to be implemented");
|
||||||
|
//return 0;
|
||||||
|
if (rule_severity >= 6.52){
|
||||||
|
return Math.min((50+50*(rule_severity-6.52)/93.48)/100,1);
|
||||||
|
}else{
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
}else{
|
||||||
|
Logger.getAnonymousLogger().log(warning_logging_level,"Unknown severity calculation method");
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
}
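//The following helper is an illustrative sketch, not part of the original implementation; it only records the indicative probability values which determine_slo_violation_probability yields for some sample severities, assuming the respective slo_violation_determination_method is configured.
private static void example_violation_probabilities() {
//With the "all-metrics" method, the severity is mapped linearly, so a severity of 40 is expected to yield a probability of 0.4
Logger.getAnonymousLogger().log(info_logging_level,"Probability for severity 40: "+determine_slo_violation_probability(40));
//With the "prconf-delta" method, severities below 6.52 are expected to yield a probability of 0, and a severity of 6.52 marks the start of the [0.5,1] probability range
Logger.getAnonymousLogger().log(info_logging_level,"Probability for severity 6.52: "+determine_slo_violation_probability(6.52));
}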
|
||||||
|
|
||||||
|
public static AtomicBoolean getStop_signal() {
|
||||||
|
return stop_signal;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
314
slo-violation-detector/src/main/java/slo_processing/SLORule.java
Normal file
@ -0,0 +1,314 @@
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package slo_processing;
|
||||||
|
|
||||||
|
import org.json.simple.JSONArray;
|
||||||
|
import org.json.simple.JSONObject;
|
||||||
|
import org.json.simple.parser.JSONParser;
|
||||||
|
import org.json.simple.parser.ParseException;
|
||||||
|
import runtime.Main;
|
||||||
|
import utilities.MathUtils;
|
||||||
|
import utilities.SLOViolationCalculator;
|
||||||
|
import utility_beans.RealtimeMonitoringAttribute;
|
||||||
|
import utility_beans.PredictedMonitoringAttribute;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.logging.Level;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
import java.util.stream.Collectors;
|
||||||
|
|
||||||
|
import static configuration.Constants.*;
|
||||||
|
import static slo_processing.SLOSubRule.find_rule_type;
|
||||||
|
import static utility_beans.PredictedMonitoringAttribute.getPredicted_monitoring_attributes;
|
||||||
|
|
||||||
|
public class SLORule {
|
||||||
|
private static HashMap<String,Integer> attribute_ids = new HashMap<>(6);
|
||||||
|
private ArrayList<SLOSubRule> slo_subrules = new ArrayList<>(6);
|
||||||
|
private ArrayList<String> monitoring_attributes = new ArrayList<>();
|
||||||
|
private JSONObject rule_representation;
|
||||||
|
private SLOFormatVersion rule_format;
|
||||||
|
private enum SLOFormatVersion {invalid,older,newer}
|
||||||
|
private static int id = 0;
|
||||||
|
//double SLO_severity;
|
||||||
|
|
||||||
|
public SLORule(String rule_representation, ArrayList<String> metric_list){
|
||||||
|
for (String metric: metric_list) {
|
||||||
|
RealtimeMonitoringAttribute monitoring_attribute;
|
||||||
|
if (!RealtimeMonitoringAttribute.getMonitoring_attributes().containsKey(metric)){
|
||||||
|
monitoring_attribute = new RealtimeMonitoringAttribute(metric);
|
||||||
|
RealtimeMonitoringAttribute.getMonitoring_attributes().put(metric,monitoring_attribute);
|
||||||
|
}
|
||||||
|
monitoring_attributes.add(metric);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
this.rule_representation = (JSONObject) new JSONParser().parse(rule_representation);
|
||||||
|
}catch (ParseException p){
|
||||||
|
p.printStackTrace();
|
||||||
|
}
|
||||||
|
this.rule_format = find_rule_format(this.rule_representation);
|
||||||
|
this.slo_subrules = parse_subrules(this.rule_representation,this.rule_format);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static SLOFormatVersion find_rule_format(JSONObject rule_representation) {
|
||||||
|
String rule_id = (String) rule_representation.get("id");
|
||||||
|
String rule_name = (String) rule_representation.get("name");
|
||||||
|
SLOFormatVersion rule_format = SLOFormatVersion.invalid;
|
||||||
|
if (rule_id!=null){
|
||||||
|
rule_format = SLOFormatVersion.older;
|
||||||
|
}
|
||||||
|
else if (rule_name!=null){
|
||||||
|
rule_format = SLOFormatVersion.newer;
|
||||||
|
}else {
|
||||||
|
Logger.getAnonymousLogger().log(Level.SEVERE,"An invalid rule was sent to the SLO Violation detector - ignoring the rule having the following representation\n"+rule_representation.toJSONString());
return rule_format;
}
|
||||||
|
return rule_format;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static ArrayList<SLOSubRule> parse_subrules(JSONObject rule_json, SLOFormatVersion rule_format){
|
||||||
|
ArrayList<SLOSubRule> subrules = new ArrayList<>(); //initialization
|
||||||
|
String rule_id = (String) rule_json.get("id");
|
||||||
|
String rule_operator = (String) rule_json.get("operator");
|
||||||
|
//First checking for older-format rules, then newer format rules
|
||||||
|
if (rule_format.equals(SLOFormatVersion.older)){
|
||||||
|
if (is_composite_rule_from_id(rule_id)) {
|
||||||
|
JSONArray subrules_json_array = (JSONArray) rule_json.get(rule_id);
|
||||||
|
for (Object subrule : subrules_json_array) {
|
||||||
|
JSONObject json_subrule = (JSONObject) subrule;
|
||||||
|
subrules.addAll(parse_subrules(json_subrule,rule_format));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
String attribute = (String) rule_json.get("attribute");
|
||||||
|
String threshold = (String) rule_json.get("threshold");
|
||||||
|
if (rule_operator.equals("<>")) {
|
||||||
|
subrules.add(new SLOSubRule(attribute, "<", Double.parseDouble(threshold), Integer.parseInt(rule_id)));
|
||||||
|
subrules.add(new SLOSubRule(attribute, ">", Double.parseDouble(threshold), Integer.parseInt(rule_id)+1000000)); //assuming that there are less than 1000000 subrules
|
||||||
|
} else {
|
||||||
|
subrules.add(new SLOSubRule(attribute, rule_operator, Double.parseDouble(threshold), Integer.parseInt(rule_id)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//newer format
|
||||||
|
else if (rule_format.equals(SLOFormatVersion.newer)){
|
||||||
|
if (is_composite_rule_from_operator(rule_operator)) {
|
||||||
|
JSONArray subrules_json_array = (JSONArray) rule_json.get("constraints");
|
||||||
|
for (Object subrule : subrules_json_array) {
|
||||||
|
JSONObject json_subrule = (JSONObject) subrule;
|
||||||
|
subrules.addAll(parse_subrules(json_subrule,rule_format));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
String attribute = (String) rule_json.get("metric");
|
||||||
|
Double threshold = (Double) rule_json.get("threshold");
|
||||||
|
if (is_composite_rule_from_threshold_operator(rule_operator)) {
|
||||||
|
subrules.add(new SLOSubRule(attribute, ">", threshold,get_id_for(attribute)));
|
||||||
|
subrules.add(new SLOSubRule(attribute, "<", threshold,get_id_for(attribute))); //TODO perhaps here change the id
|
||||||
|
}else{
|
||||||
|
subrules.add(new SLOSubRule(attribute, rule_operator, threshold,get_id_for(attribute)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return subrules;
|
||||||
|
}
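//The following helper is an illustrative sketch, not part of the original implementation; the constraint below is a hypothetical newer-format example showing how parse_subrules expands the "<>" (not-equal) operator into a greater-than and a less-than subrule.
private static void example_unequal_constraint_expansion() throws ParseException {
JSONObject sample_constraint = (JSONObject) new JSONParser().parse("{\"name\":\"c1\",\"metric\":\"cpu\",\"operator\":\"<>\",\"threshold\":80.0}");
//The resulting list is expected to contain two subrules, cpu>80.0 and cpu<80.0
ArrayList<SLOSubRule> expanded_subrules = parse_subrules(sample_constraint,SLOFormatVersion.newer);
Logger.getAnonymousLogger().log(Level.INFO,"Expanded subrules: "+expanded_subrules);
}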
|
||||||
|
|
||||||
|
/**
|
||||||
|
*This method is used to assign id's to attribute names and create a mapping between them, in the case of 'newer' format SLO definitions
|
||||||
|
* @param attribute The name of the monitoring metric (attribute) for which the rule is formulated
|
||||||
|
* @return An Integer identifier
|
||||||
|
*/
|
||||||
|
private static Integer get_id_for(String attribute) {
|
||||||
|
if (attribute_ids.containsKey(attribute)){
|
||||||
|
return attribute_ids.get(attribute);
|
||||||
|
}else {
|
||||||
|
Integer new_attribute_id = id++;
attribute_ids.put(attribute, new_attribute_id);
return new_attribute_id;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static double process_rule_value(JSONObject rule_json,Long targeted_prediction_time, SLOFormatVersion rule_format) {
|
||||||
|
|
||||||
|
StringBuilder calculation_logging_string = new StringBuilder();
|
||||||
|
double rule_result_value = -1; //initialization
|
||||||
|
String rule_id = (String)rule_json.get("id");
|
||||||
|
String rule_operator = (String)rule_json.get("operator");
|
||||||
|
String rule_metric = (String)rule_json.get("metric");
|
||||||
|
Double rule_threshold = (Double)rule_json.get("threshold");
|
||||||
|
|
||||||
|
JSONArray subrules_json_array = new JSONArray();
|
||||||
|
boolean composite_rule = false;
|
||||||
|
boolean special_operator_subrule = false;
|
||||||
|
//older format
|
||||||
|
if ((rule_format.equals(SLOFormatVersion.older)) && is_composite_rule_from_id(rule_id)) {
|
||||||
|
subrules_json_array = (JSONArray) rule_json.get(rule_id);
|
||||||
|
composite_rule = true;
|
||||||
|
}
|
||||||
|
//newer format
|
||||||
|
else if ((rule_format.equals(SLOFormatVersion.newer)) && (is_composite_rule_from_operator(rule_operator))){
|
||||||
|
subrules_json_array = (JSONArray) rule_json.get("constraints");
|
||||||
|
composite_rule = true;
|
||||||
|
}
|
||||||
|
else if (is_composite_rule_from_threshold_operator(rule_operator)){
|
||||||
|
subrules_json_array = new JSONArray();
|
||||||
|
composite_rule = true;
|
||||||
|
special_operator_subrule = true;
|
||||||
|
// operator, metric, name (unnecessary), threshold
|
||||||
|
//create simple json object for the two deterministic subrules (the greater-than and the less-than)
|
||||||
|
JSONObject first_simple_subrule_json = new JSONObject();
|
||||||
|
JSONObject second_simple_subrule_json = new JSONObject();
|
||||||
|
first_simple_subrule_json.put("operator",">");
|
||||||
|
first_simple_subrule_json.put("threshold",rule_threshold);
|
||||||
|
first_simple_subrule_json.put("metric",rule_metric);
|
||||||
|
second_simple_subrule_json.put("operator","<");
|
||||||
|
second_simple_subrule_json.put("threshold",rule_threshold);
|
||||||
|
second_simple_subrule_json.put("metric",rule_metric);
|
||||||
|
subrules_json_array.add(first_simple_subrule_json);
|
||||||
|
subrules_json_array.add(second_simple_subrule_json);
|
||||||
|
//subrules_json_array.add
|
||||||
|
}
|
||||||
|
if (composite_rule){
|
||||||
|
ArrayList<Double> individual_severity_contributions = new ArrayList<>();
|
||||||
|
boolean and_subrules_invalidated = false;
|
||||||
|
for (Object subrule: subrules_json_array) {
|
||||||
|
JSONObject json_subrule = (JSONObject) subrule;
|
||||||
|
//String json_subrule_id = (String) json_subrule.get("id");
|
||||||
|
double subrule_result = process_rule_value(json_subrule,targeted_prediction_time,rule_format);
|
||||||
|
calculation_logging_string.append("\nThe severity calculation for subrule ").append(json_subrule).append(" is ").append(subrule_result).append("\n");
|
||||||
|
String logical_operator = EMPTY;
|
||||||
|
if (rule_format.equals(SLOFormatVersion.older)){
|
||||||
|
logical_operator = (get_logical_operator_part(rule_id)).toLowerCase();
|
||||||
|
}else if (rule_format.equals(SLOFormatVersion.newer)){
|
||||||
|
logical_operator = rule_operator.toLowerCase();
|
||||||
|
}
|
||||||
|
if (special_operator_subrule){
|
||||||
|
logical_operator = "or";
|
||||||
|
}
|
||||||
|
if (logical_operator.equals("and")){
|
||||||
|
if (subrule_result<0){
|
||||||
|
//return -1; //all other rules are invalidated
|
||||||
|
and_subrules_invalidated = true;
|
||||||
|
}else {
|
||||||
|
if (!and_subrules_invalidated /*&& !is_composite_rule(json_subrule_id)*/) {
|
||||||
|
//individual_severity_contributions.add(MonitoringAttribute.get_monitoring_attributes_values_map().get((String)json_subrule.get("id")));
|
||||||
|
//individual_severity_contributions.add(PredictionAttribute.getPredicted_attributes_values().get((String)json_subrule.get("id")));
|
||||||
|
individual_severity_contributions.add(subrule_result);
|
||||||
|
|
||||||
|
}
|
||||||
|
//rule_result_value = calculate_severity(and_subrule_severity_values);
|
||||||
|
}
|
||||||
|
}else if (logical_operator.equals("or")){
|
||||||
|
rule_result_value = Math.max(rule_result_value,subrule_result);
|
||||||
|
calculation_logging_string.append("Calculating maximum of individual severity contributions - current is ").append(rule_result_value).append(" prospective higher severity is ").append(subrule_result).append("\n");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (slo_violation_determination_method.equals("all-metrics")&& individual_severity_contributions.size()>0) {
|
||||||
|
rule_result_value = MathUtils.get_average(individual_severity_contributions);
|
||||||
|
calculation_logging_string.append("Calculating average of individual severity contributions: ").append(individual_severity_contributions).append(" equals ").append(rule_result_value).append("\n");
|
||||||
|
}else if (slo_violation_determination_method.equals("prconf-delta") && individual_severity_contributions.size()>0){
|
||||||
|
rule_result_value = Math.sqrt(MathUtils.sum(individual_severity_contributions.stream().map(x->x*x).collect(Collectors.toList())))/Math.sqrt(individual_severity_contributions.size());
|
||||||
|
calculation_logging_string.append("Calculating square root of sum of individual severity contributions: ").append(individual_severity_contributions).append(" - the result is ").append(rule_result_value).append("\n");
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
//Debugging information logging
|
||||||
|
if (rule_result_value>10000){
|
||||||
|
calculation_logging_string.append("\nDue to the severity value being over 10000, it is replaced by 10000");
|
||||||
|
rule_result_value = 10000;
|
||||||
|
}
|
||||||
|
Main.severity_calculation_event_recording_queue.add(calculation_logging_string.toString());
|
||||||
|
return rule_result_value;
|
||||||
|
}
|
||||||
|
|
||||||
|
else{
|
||||||
|
//String attribute_name = (String) rule_json.get("attribute");
|
||||||
|
//String operator = (String) rule_json.get("operator");
|
||||||
|
SLOSubRule.RuleType rule_type = find_rule_type(rule_operator);
|
||||||
|
//double threshold = Double.parseDouble((String) rule_json.get("threshold"));
|
||||||
|
Integer subrule_id=0;
|
||||||
|
if (rule_format.equals(SLOFormatVersion.older)) {
|
||||||
|
subrule_id = Integer.parseInt((String) rule_json.get("id"));
|
||||||
|
}else if (rule_format.equals(SLOFormatVersion.newer)){
|
||||||
|
subrule_id = get_id_for(rule_metric);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (getPredicted_monitoring_attributes().get(subrule_id)== null){
|
||||||
|
rule_result_value = -1;
|
||||||
|
return rule_result_value;
|
||||||
|
}
|
||||||
|
PredictedMonitoringAttribute new_prediction_attribute = getPredicted_monitoring_attributes().get(subrule_id).get(targeted_prediction_time);
|
||||||
|
|
||||||
|
if (new_prediction_attribute==null || !new_prediction_attribute.isInitialized() || new_prediction_attribute.getDelta_for_less_than_rule()<LOWER_LIMIT_DELTA || new_prediction_attribute.getDelta_for_greater_than_rule() < LOWER_LIMIT_DELTA){ //delta is normalized so only this case is examined here
|
||||||
|
rule_result_value = -1;
|
||||||
|
}else {
|
||||||
|
rule_result_value = 1; //not a real value, but a positive number to signify that there is a threshold violation
|
||||||
|
if (slo_violation_determination_method.equals("all-metrics")){
|
||||||
|
rule_result_value = SLOViolationCalculator.get_Severity_all_metrics_method(new_prediction_attribute,rule_type);
|
||||||
|
}else if (slo_violation_determination_method.equals("prconf-delta")){
|
||||||
|
rule_result_value = SLOViolationCalculator.get_Severity_prconf_delta_method(new_prediction_attribute,rule_type);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
calculation_logging_string.append(dashed_line).append("\nThe severity calculation for simple subrule ").append(rule_metric).append(rule_operator).append(rule_threshold).append(" is ").append(rule_result_value).append(dashed_line);
|
||||||
|
return rule_result_value;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean is_composite_rule_from_id(String rule_id) {
|
||||||
|
String rule_id_logical_operator_part = get_logical_operator_part(rule_id);
|
||||||
|
return (rule_id_logical_operator_part!=null && !rule_id_logical_operator_part.isEmpty() && Arrays.stream(logic_operators).anyMatch(e-> e.equals(rule_id_logical_operator_part)));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean is_composite_rule_from_operator (String operator){
|
||||||
|
String lowercase_operator = operator.toLowerCase();
|
||||||
|
return (!operator.isEmpty() && Arrays.stream(logic_operators).anyMatch(e-> e.equals(lowercase_operator)));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean is_composite_rule_from_threshold_operator (String operator){
|
||||||
|
return operator.equals("<>");
|
||||||
|
}
|
||||||
|
|
||||||
|
private static String get_logical_operator_part(String rule_id) {
|
||||||
|
return rule_id.split("-")[0]; //possibly contains the name of a logical operator
|
||||||
|
}
|
||||||
|
|
||||||
|
private static double calculate_severity(ArrayList<Double> and_subrule_severity_values) {
|
||||||
|
double severity_value;
|
||||||
|
double severity_sum = 0;
|
||||||
|
for (Double value : and_subrule_severity_values){
|
||||||
|
severity_sum = severity_sum + value*value;
|
||||||
|
}
|
||||||
|
severity_value = Math.sqrt(severity_sum)/(100*Math.sqrt(and_subrule_severity_values.size()));
|
||||||
|
return severity_value;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public JSONObject getRule_representation() {
|
||||||
|
return rule_representation;
|
||||||
|
}
|
||||||
|
|
||||||
|
public ArrayList<String> get_monitoring_attributes (){
|
||||||
|
return monitoring_attributes;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public ArrayList<SLOSubRule> getSlo_subrules() {
|
||||||
|
return slo_subrules;
|
||||||
|
}
|
||||||
|
|
||||||
|
public SLOFormatVersion getRule_format() {
|
||||||
|
return rule_format;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setRule_format(SLOFormatVersion rule_format) {
|
||||||
|
this.rule_format = rule_format;
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,13 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package slo_processing;
|
||||||
|
|
||||||
|
public enum SLORuleJoin {
|
||||||
|
and,or
|
||||||
|
}
|
@ -0,0 +1,89 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package slo_processing;
|
||||||
|
|
||||||
|
import utility_beans.PredictedMonitoringAttribute;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
|
||||||
|
public class SLOSubRule {
|
||||||
|
public enum RuleType {greater_than_rule,less_than_rule,equal_rule,unequal_rule,unknown}
|
||||||
|
private RuleType rule_type;
|
||||||
|
private static HashMap<String, ArrayList<SLOSubRule>> slo_subrules_per_monitoring_attribute = new HashMap<>();
|
||||||
|
private String metric;
|
||||||
|
private String operator;
|
||||||
|
private SLORuleJoin rule_join_type;
|
||||||
|
private Double threshold;
|
||||||
|
private Integer id;
|
||||||
|
private PredictedMonitoringAttribute associated_predicted_monitoring_attribute;
|
||||||
|
|
||||||
|
public SLOSubRule(String metric, String operator, Double threshold,Integer id){
|
||||||
|
this.metric = metric;
|
||||||
|
this.operator = operator;
|
||||||
|
this.threshold = threshold;
|
||||||
|
this.id = id;
|
||||||
|
this.associated_predicted_monitoring_attribute = new PredictedMonitoringAttribute(metric);
|
||||||
|
this.rule_type = find_rule_type(operator);
|
||||||
|
}
|
||||||
|
public static RuleType find_rule_type(String operator){
|
||||||
|
RuleType rule_type = RuleType.unknown;
|
||||||
|
if (operator.equals(">") || operator.equals(">=")){
|
||||||
|
rule_type = RuleType.greater_than_rule;
|
||||||
|
}else if (operator.equals("<") || operator.equals("<=")){
|
||||||
|
rule_type = RuleType.less_than_rule;
|
||||||
|
}else if (operator.equals("==")){
|
||||||
|
rule_type = RuleType.equal_rule;
|
||||||
|
}else if (operator.equals("<>")){
|
||||||
|
rule_type = RuleType.unequal_rule; //although this rule type will never be handled independently
|
||||||
|
}
|
||||||
|
return rule_type;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getMetric() {
|
||||||
|
return metric;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getOperator() {
|
||||||
|
return operator;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Double getThreshold() {
|
||||||
|
return threshold;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Integer getId() {
|
||||||
|
return id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static HashMap<String, ArrayList<SLOSubRule>> getSlo_subrules_per_monitoring_attribute() {
|
||||||
|
return slo_subrules_per_monitoring_attribute;
|
||||||
|
}
|
||||||
|
|
||||||
|
public PredictedMonitoringAttribute getAssociated_predicted_monitoring_attribute() {
|
||||||
|
return associated_predicted_monitoring_attribute;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setAssociated_predicted_monitoring_attribute(PredictedMonitoringAttribute associated_predicted_monitoring_attribute) {
|
||||||
|
this.associated_predicted_monitoring_attribute = associated_predicted_monitoring_attribute;
|
||||||
|
}
|
||||||
|
|
||||||
|
public RuleType getRule_type() {
|
||||||
|
return rule_type;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setRule_type(RuleType rule_type) {
|
||||||
|
this.rule_type = rule_type;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String toString(){
|
||||||
|
return ("The rule is "+metric+operator+threshold+"\n+ The associated Predicted Monitoring Attribute is "+associated_predicted_monitoring_attribute.toString());
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,116 @@
|
|||||||
|
package utilities;
|
||||||
|
|
||||||
|
import eu.melodic.event.brokerclient.BrokerPublisher;
|
||||||
|
import eu.melodic.event.brokerclient.BrokerSubscriber;
|
||||||
|
import org.apache.commons.collections4.queue.CircularFifoQueue;
|
||||||
|
import runtime.Main;
|
||||||
|
import slo_processing.SLOSubRule;
|
||||||
|
import utility_beans.RealtimeMonitoringAttribute;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.function.BiFunction;
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
|
||||||
|
import static configuration.Constants.amq_library_configuration_location;
|
||||||
|
import static configuration.Constants.info_logging_level;
|
||||||
|
import static runtime.Main.running_threads;
|
||||||
|
import static runtime.Main.slo_violation_event_recording_queue;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The objective of this class is to allow a structured synopsis of the current state of the SLO Violation Detector to be created, as a response to a request sent to it through an appropriate topic.
|
||||||
|
*/
|
||||||
|
public class DebugDataSubscription {
|
||||||
|
|
||||||
|
private static String debug_data_trigger_topic_name = "sloviolationdetector.debug";
|
||||||
|
private static String debug_data_output_topic_name = "sloviolationdetector.debug_output";
|
||||||
|
private static String broker_username,broker_password,broker_ip_address;
|
||||||
|
static BiFunction <String,String,String> debug_data_generation = (topic, message) ->{
|
||||||
|
|
||||||
|
String output_debug_data = "";
|
||||||
|
StringBuilder intermediate_debug_string = new StringBuilder();
|
||||||
|
intermediate_debug_string.append("The following threads are currently running:").append("\n");
|
||||||
|
|
||||||
|
boolean flag_first_element_iterated = true;
|
||||||
|
for (String s : running_threads.keySet()){
|
||||||
|
if (flag_first_element_iterated) {
|
||||||
|
intermediate_debug_string.append(s);
|
||||||
|
flag_first_element_iterated = false;
|
||||||
|
}else{
|
||||||
|
intermediate_debug_string.append(",\n").append(s);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
intermediate_debug_string.append("\n");
|
||||||
|
output_debug_data = output_debug_data+intermediate_debug_string;
|
||||||
|
intermediate_debug_string = new StringBuilder();
|
||||||
|
|
||||||
|
flag_first_element_iterated = true;
|
||||||
|
intermediate_debug_string.append("The following Monitoring Attribute values are currently stored:\n");
|
||||||
|
for ( Map.Entry<String, RealtimeMonitoringAttribute> entry :RealtimeMonitoringAttribute.getMonitoring_attributes().entrySet() ){
|
||||||
|
if (flag_first_element_iterated){
|
||||||
|
intermediate_debug_string.append("\n- Metric name: ").append(entry.getKey());
|
||||||
|
flag_first_element_iterated = false;
|
||||||
|
}else{
|
||||||
|
intermediate_debug_string.append("- Metric name: ").append(entry.getKey());
|
||||||
|
}
|
||||||
|
|
||||||
|
Double metric_value = RealtimeMonitoringAttribute.get_metric_value(entry.getKey());
|
||||||
|
CircularFifoQueue<Double> metric_values = entry.getValue().getActual_metric_values();
|
||||||
|
if (metric_value.isNaN()){
|
||||||
|
intermediate_debug_string.append(" - value was determined as NaN, individual collected values are ").append(metric_values).append("\n");
|
||||||
|
}
|
||||||
|
else if (metric_value.isInfinite()){
|
||||||
|
intermediate_debug_string.append(" - value was determined as infinite, individual collected values are ").append(metric_values).append("\n");
|
||||||
|
} else {
|
||||||
|
intermediate_debug_string.append(" - value from individual values").append(metric_values).append(" is ").append(metric_value).append("\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
output_debug_data = output_debug_data+intermediate_debug_string;
|
||||||
|
intermediate_debug_string = new StringBuilder();
|
||||||
|
|
||||||
|
intermediate_debug_string.append("The following subrules have been parsed and are stored:\n");
|
||||||
|
for (Map.Entry<String, ArrayList<SLOSubRule>> entry : SLOSubRule.getSlo_subrules_per_monitoring_attribute().entrySet()){
|
||||||
|
intermediate_debug_string.append("Metric name: ").append(entry.getKey());
|
||||||
|
for (SLOSubRule rule : entry.getValue()) {
|
||||||
|
intermediate_debug_string.append("\n").append(rule.toString());
|
||||||
|
}
|
||||||
|
intermediate_debug_string.append("\n");
|
||||||
|
}
|
||||||
|
output_debug_data = output_debug_data+intermediate_debug_string;
|
||||||
|
intermediate_debug_string = new StringBuilder();
|
||||||
|
|
||||||
|
output_debug_data = output_debug_data+"\nShowing the adaptation times that pend processing:\n"+ Main.adaptation_times_pending_processing;
|
||||||
|
intermediate_debug_string.append("\nThese are the timestamps of the latest adaptation events\n").append(slo_violation_event_recording_queue);
|
||||||
|
|
||||||
|
Logger.getGlobal().log(info_logging_level,"Debug data generated:\n"+output_debug_data);
|
||||||
|
BrokerPublisher publisher = new BrokerPublisher(debug_data_output_topic_name, broker_ip_address, broker_username, broker_password, amq_library_configuration_location);
|
||||||
|
publisher.publish(output_debug_data);
|
||||||
|
return output_debug_data;
|
||||||
|
};
|
||||||
|
public static void initiate(String broker_ip_address, String broker_username, String broker_password) {
|
||||||
|
BrokerSubscriber debug_data_subscriber = new BrokerSubscriber(debug_data_trigger_topic_name, broker_ip_address, broker_username, broker_password, amq_library_configuration_location);
|
||||||
|
Thread debug_data_subscription_thread = new Thread(() -> {
|
||||||
|
try {
|
||||||
|
synchronized (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(debug_data_trigger_topic_name)) {
|
||||||
|
//if (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(debug_data_topic_name).getValue())
|
||||||
|
debug_data_subscriber.subscribe(debug_data_generation, Main.stop_signal);
|
||||||
|
}
|
||||||
|
if (Thread.interrupted()) {
|
||||||
|
throw new InterruptedException();
|
||||||
|
}
|
||||||
|
} catch (Exception i) {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Possible interruption of debug data subscriber thread for " + debug_data_trigger_topic_name + " - if not stacktrace follows");
|
||||||
|
if (!(i instanceof InterruptedException)) {
|
||||||
|
i.printStackTrace();
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "Removing debug data subscriber thread for " + debug_data_trigger_topic_name);
|
||||||
|
running_threads.remove("debug_data_subscription_thread_" + debug_data_trigger_topic_name);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
running_threads.put("debug_data_subscription_thread_" + debug_data_trigger_topic_name, debug_data_subscription_thread);
|
||||||
|
debug_data_subscription_thread.start();
|
||||||
|
}
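//The following helper is an illustrative sketch, not part of the original implementation; it shows how, once initiate() has been called, the state synopsis can be requested by publishing an (arbitrary) message to the debug trigger topic, using the same broker coordinates that were passed to initiate().
public static void request_debug_data(String broker_ip_address, String broker_username, String broker_password) {
BrokerPublisher trigger_publisher = new BrokerPublisher(debug_data_trigger_topic_name, broker_ip_address, broker_username, broker_password, amq_library_configuration_location);
//The content of the trigger message is not inspected by the subscriber, so an empty JSON object suffices
trigger_publisher.publish("{}");
}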
|
||||||
|
}
|
@ -0,0 +1,32 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package utilities;
|
||||||
|
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
public class MathUtils {
|
||||||
|
public static double get_average(Iterable<Double> values){
|
||||||
|
double sum = 0;
|
||||||
|
int counter = 0;
|
||||||
|
for (Double value : values){
|
||||||
|
sum = sum+value;
|
||||||
|
counter++;
|
||||||
|
}
|
||||||
|
return (sum/counter);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static double sum(Iterable<Double> values) {
|
||||||
|
double sum = 0;
|
||||||
|
for (Double value : values){
|
||||||
|
sum = sum+value;
|
||||||
|
}
|
||||||
|
return sum;
|
||||||
|
}
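//The following helper is an illustrative sketch, not part of the original implementation; it only demonstrates the expected behaviour of the two utility methods above on a small hypothetical sample.
public static void example_usage() {
List<Double> sample_values = List.of(10.0, 20.0, 30.0);
//get_average is expected to return 20.0 and sum is expected to return 60.0 for the sample above
System.out.println("Average: " + get_average(sample_values) + ", sum: " + sum(sample_values));
}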
|
||||||
|
}
|
@ -0,0 +1,151 @@
|
|||||||
|
/*
|
||||||
|
* Copyright (c) 2023 Institute of Communication and Computer Systems
|
||||||
|
*
|
||||||
|
* This Source Code Form is subject to the terms of the Mozilla Public
|
||||||
|
* License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||||
|
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package utilities;
|
||||||
|
|
||||||
|
import slo_processing.SLORule;
|
||||||
|
import slo_processing.SLOSubRule;
|
||||||
|
import utility_beans.PredictedMonitoringAttribute;
|
||||||
|
|
||||||
|
import java.util.logging.Logger;
|
||||||
|
|
||||||
|
import static configuration.Constants.info_logging_level;
|
||||||
|
import static configuration.Constants.warning_logging_level;
|
||||||
|
|
||||||
|
public class SLOViolationCalculator {
|
||||||
|
|
||||||
|
public static double get_Severity_all_metrics_method(PredictedMonitoringAttribute predictionAttribute, SLOSubRule.RuleType rule_type){
|
||||||
|
|
||||||
|
double all_metrics_method_attribute_severity;
|
||||||
|
if (rule_type.equals(SLOSubRule.RuleType.greater_than_rule)){
|
||||||
|
double severity_sum = get_greater_than_severity_sum(predictionAttribute);
|
||||||
|
all_metrics_method_attribute_severity = Math.sqrt(severity_sum)/Math.sqrt(3);
|
||||||
|
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The all-metrics attribute severity for a greater-than rule related to attribute " + predictionAttribute.getName() + " based on a (roc,prconf,normalized_interval,delta) quadraplet of (" + predictionAttribute.getRate_of_change_for_greater_than_rule() + "," + predictionAttribute.getProbability_confidence()+ "," + predictionAttribute.getNormalizedConfidenceIntervalWidth()+","+predictionAttribute.getDelta_for_greater_than_rule() + ") is " + all_metrics_method_attribute_severity);
|
||||||
|
if (severity_sum<0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The NaN severity value is produced due to the root of a negative severity sum");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (rule_type.equals(SLOSubRule.RuleType.less_than_rule)){
|
||||||
|
double severity_sum = get_less_than_severity_sum(predictionAttribute);
|
||||||
|
all_metrics_method_attribute_severity = Math.sqrt(severity_sum)/Math.sqrt(3);
|
||||||
|
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The all-metrics attribute severity for a less-than rule related to attribute " + predictionAttribute.getName() + " based on a (roc,prconf,normalized_interval,delta) quadraplet of (" + predictionAttribute.getRate_of_change_for_less_than_rule() + "," + predictionAttribute.getProbability_confidence()+ "," + predictionAttribute.getNormalizedConfidenceIntervalWidth()+","+predictionAttribute.getDelta_for_less_than_rule() + ") is " + all_metrics_method_attribute_severity);
|
||||||
|
if (severity_sum<0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The NaN severity value is produced due to the root of a negative severity sum");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
else if (rule_type.equals(SLOSubRule.RuleType.equal_rule)){
|
||||||
|
|
||||||
|
double greater_than_severity_sum = get_greater_than_severity_sum(predictionAttribute);
|
||||||
|
double less_than_severity_sum = get_less_than_severity_sum(predictionAttribute);
|
||||||
|
|
||||||
|
if (less_than_severity_sum>greater_than_severity_sum){
|
||||||
|
all_metrics_method_attribute_severity = Math.sqrt(less_than_severity_sum)/Math.sqrt(3);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The all-metrics attribute severity for an 'equals' rule related to attribute " + predictionAttribute.getName() + " based on a (roc,prconf,normalized_interval,delta) quadraplet of (" + predictionAttribute.getRate_of_change_for_less_than_rule() + "," + predictionAttribute.getProbability_confidence()+ "," + predictionAttribute.getNormalizedConfidenceIntervalWidth()+","+predictionAttribute.getDelta_for_less_than_rule() + ") is " + all_metrics_method_attribute_severity);
|
||||||
|
if (less_than_severity_sum<0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The NaN severity value is produced due to the root of a negative severity sum");
|
||||||
|
}
|
||||||
|
}else{
|
||||||
|
all_metrics_method_attribute_severity = Math.sqrt(greater_than_severity_sum)/Math.sqrt(3);
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The all-metrics attribute severity for a greater-than rule related to attribute " + predictionAttribute.getName() + " based on a (roc,prconf,normalized_interval,delta) quadraplet of (" + predictionAttribute.getRate_of_change_for_greater_than_rule() + "," + predictionAttribute.getProbability_confidence()+ "," + predictionAttribute.getNormalizedConfidenceIntervalWidth()+","+predictionAttribute.getDelta_for_greater_than_rule() + ") is " + all_metrics_method_attribute_severity);
|
||||||
|
if (greater_than_severity_sum<0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"The NaN severity value is produced due to the root of a negative severity sum");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}else {
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"An unknown type of rule was introduced, therefore setting the severity to -1 to prevent any adaptation");
|
||||||
|
all_metrics_method_attribute_severity = -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Double.isNaN(all_metrics_method_attribute_severity) || ( all_metrics_method_attribute_severity<0)){
|
||||||
|
Logger.getAnonymousLogger().log(warning_logging_level,"Negative or NaN severity produced: "+all_metrics_method_attribute_severity+" using 0 instead");
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
return all_metrics_method_attribute_severity; //or think of another way to implement this
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static double get_less_than_severity_sum(PredictedMonitoringAttribute predictionAttribute) {
|
||||||
|
Double roc_sign = predictionAttribute.getRate_of_change_for_less_than_rule()/Math.abs(predictionAttribute.getRate_of_change_for_less_than_rule());
|
||||||
|
Double delta_sign = predictionAttribute.getDelta_for_less_than_rule()/Math.abs(predictionAttribute.getDelta_for_less_than_rule());
|
||||||
|
if (delta_sign.isNaN()){
|
||||||
|
delta_sign = 1.0;
|
||||||
|
}
|
||||||
|
if (roc_sign.isNaN()){
|
||||||
|
roc_sign = 1.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
Double rate_of_change_factor = roc_sign*predictionAttribute.getRate_of_change_for_less_than_rule()*predictionAttribute.getRate_of_change_for_less_than_rule();
|
||||||
|
Double probability_confidence_factor =
|
||||||
|
predictionAttribute.getProbability_confidence()*
|
||||||
|
predictionAttribute.getProbability_confidence()*
|
||||||
|
(100-predictionAttribute.getNormalizedConfidenceIntervalWidth())*
|
||||||
|
(100-predictionAttribute.getNormalizedConfidenceIntervalWidth())/
|
||||||
|
(100*100);//to normalize values
|
||||||
|
Double delta_factor = delta_sign*predictionAttribute.getDelta_for_less_than_rule()*predictionAttribute.getDelta_for_less_than_rule();
|
||||||
|
return rate_of_change_factor+probability_confidence_factor+delta_factor;
|
||||||
|
}
|
||||||
|
|
||||||
|
private static double get_greater_than_severity_sum(PredictedMonitoringAttribute predictionAttribute) {
|
||||||
|
Double roc_sign = predictionAttribute.getRate_of_change_for_greater_than_rule()/Math.abs(predictionAttribute.getRate_of_change_for_greater_than_rule());
|
||||||
|
Double delta_sign = predictionAttribute.getDelta_for_greater_than_rule()/Math.abs(predictionAttribute.getDelta_for_greater_than_rule());
|
||||||
|
if (delta_sign.isNaN()){
|
||||||
|
delta_sign = 1.0;
|
||||||
|
}
|
||||||
|
if (roc_sign.isNaN()){
|
||||||
|
roc_sign = 1.0;
|
||||||
|
}
|
||||||
|
|
||||||
|
double rate_of_change_factor = roc_sign*predictionAttribute.getRate_of_change_for_greater_than_rule()*predictionAttribute.getRate_of_change_for_greater_than_rule();
|
||||||
|
double probability_confidence_factor = 0;
|
||||||
|
if (predictionAttribute.getConfidence_interval_width()<0){
|
||||||
|
probability_confidence_factor = -(100*100);
|
||||||
|
}else{
|
||||||
|
probability_confidence_factor =
|
||||||
|
predictionAttribute.getProbability_confidence()*
|
||||||
|
predictionAttribute.getProbability_confidence()*
|
||||||
|
(100-predictionAttribute.getNormalizedConfidenceIntervalWidth())*
|
||||||
|
(100-predictionAttribute.getNormalizedConfidenceIntervalWidth())/
|
||||||
|
(100*100);//to normalize values
|
||||||
|
}
|
||||||
|
double delta_factor = delta_sign*predictionAttribute.getDelta_for_greater_than_rule()*predictionAttribute.getDelta_for_greater_than_rule();
|
||||||
|
return rate_of_change_factor+probability_confidence_factor+delta_factor;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static double get_Severity_prconf_delta_method(PredictedMonitoringAttribute predictionAttribute,SLOSubRule.RuleType rule_type){
|
||||||
|
|
||||||
|
double severity_sum;
|
||||||
|
if (rule_type.equals(SLOSubRule.RuleType.greater_than_rule)) {
|
||||||
|
severity_sum = (predictionAttribute.getDelta_for_greater_than_rule() * predictionAttribute.getProbability_confidence() * (100 - predictionAttribute.getNormalizedConfidenceIntervalWidth() / 100)) / (100 * 100 * 100); //dividing by 10000 to normalize;
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The prconf-delta attribute severity for a greater-than rule related to attribute " + predictionAttribute.getName() + " based on a (prconf,delta,confidence_interval) triplet of (" + predictionAttribute.getProbability_confidence() + "," + predictionAttribute.getDelta_for_greater_than_rule() + "," + predictionAttribute.getConfidence_interval_width() + ") is " + severity_sum);
|
||||||
|
}else if (rule_type.equals(SLOSubRule.RuleType.less_than_rule)){
|
||||||
|
severity_sum = (predictionAttribute.getDelta_for_less_than_rule() * predictionAttribute.getProbability_confidence() * (100 - predictionAttribute.getNormalizedConfidenceIntervalWidth() / 100)) / (100 * 100 * 100); //dividing by 10000 to normalize;
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The prconf-delta attribute severity for a less-than rule related to attribute " + predictionAttribute.getName() + " based on a (prconf,delta,confidence_interval) triplet of (" + predictionAttribute.getProbability_confidence() + "," + predictionAttribute.getDelta_for_less_than_rule() + "," + predictionAttribute.getConfidence_interval_width() + ") is " + severity_sum);
|
||||||
|
}else if (rule_type.equals(SLOSubRule.RuleType.equal_rule)){
|
||||||
|
double greater_than_severity_sum = (predictionAttribute.getDelta_for_greater_than_rule() * predictionAttribute.getProbability_confidence() * (100 - predictionAttribute.getNormalizedConfidenceIntervalWidth() / 100)) / (100 * 100 * 100); //dividing by 10000 to normalize;
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The prconf-delta attribute severity for a greater-than rule related to attribute " + predictionAttribute.getName() + " based on a (prconf,delta,confidence_interval) triplet of (" + predictionAttribute.getProbability_confidence() + "," + predictionAttribute.getDelta_for_greater_than_rule() + "," + predictionAttribute.getConfidence_interval_width() + ") is " + greater_than_severity_sum);
|
||||||
|
|
||||||
|
|
||||||
|
double less_than_severity_sum = (predictionAttribute.getDelta_for_less_than_rule() * predictionAttribute.getProbability_confidence() * (100 - predictionAttribute.getNormalizedConfidenceIntervalWidth() / 100)) / (100 * 100 * 100); //dividing by 10000 to normalize;
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level, "The prconf-delta attribute severity for a less-than rule related to attribute " + predictionAttribute.getName() + " based on a (prconf,delta,confidence_interval) triplet of (" + predictionAttribute.getProbability_confidence() + "," + predictionAttribute.getDelta_for_less_than_rule() + "," + predictionAttribute.getConfidence_interval_width() + ") is " + less_than_severity_sum);
|
||||||
|
|
||||||
|
severity_sum = Math.max(less_than_severity_sum,greater_than_severity_sum);
|
||||||
|
}else{
|
||||||
|
severity_sum = -1;
|
||||||
|
}
|
||||||
|
if (severity_sum<0){
|
||||||
|
Logger.getAnonymousLogger().log(info_logging_level,"A NaN severity value may be produced due to the root of a negative severity sum - returning zero instead for severity sum");
|
||||||
|
severity_sum = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
return severity_sum;
|
||||||
|
}
|
||||||
|
}
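For illustration, the arithmetic behind the all-metrics severity above can be reproduced in isolation. The sketch below is standalone and all input values are made up; in the detector they come from a PredictedMonitoringAttribute (roc and delta in [-100,100], prconf and the normalized interval width in [0,100]).

// Standalone sketch of the all-metrics severity combination (illustrative inputs only).
public class AllMetricsSeveritySketch {
    public static void main(String[] args) {
        double roc = 40, prconf = 90, normalized_interval = 20, delta = 60;

        // Mirror the sign handling above: a zero (NaN sign) factor is treated as positive.
        double roc_sign = Math.signum(roc) == 0 ? 1.0 : Math.signum(roc);
        double delta_sign = Math.signum(delta) == 0 ? 1.0 : Math.signum(delta);

        double rate_of_change_factor = roc_sign * roc * roc;
        double probability_confidence_factor =
                prconf * prconf * (100 - normalized_interval) * (100 - normalized_interval) / (100 * 100);
        double delta_factor = delta_sign * delta * delta;

        double severity_sum = rate_of_change_factor + probability_confidence_factor + delta_factor;
        double severity = Math.sqrt(severity_sum) / Math.sqrt(3); // at most 100 when all three factors are maximal

        System.out.println("severity = " + severity);
    }
}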
@ -0,0 +1,108 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;


public class MonitoringAttributeStatistics {
    private double current_mean;
    private double current_dsquared;
    private int count = 0; //initialize
    private final boolean hard_upper_bound_is_set,hard_lower_bound_is_set;
    private double upper_bound, lower_bound;

    public MonitoringAttributeStatistics(){
        hard_upper_bound_is_set = false;
        hard_lower_bound_is_set = false;
        lower_bound = Double.POSITIVE_INFINITY;
        upper_bound = Double.NEGATIVE_INFINITY;
    }

    public MonitoringAttributeStatistics(double hard_upper_or_hard_lower_bound, boolean is_hard_upper_bound){
        if (is_hard_upper_bound){
            hard_upper_bound_is_set = true;
            hard_lower_bound_is_set = false;
            upper_bound = hard_upper_or_hard_lower_bound;
            lower_bound = Double.POSITIVE_INFINITY;
        }else{
            hard_lower_bound_is_set = true;
            hard_upper_bound_is_set = false;
            lower_bound = hard_upper_or_hard_lower_bound;
            upper_bound = Double.NEGATIVE_INFINITY;
        }
    }

    public MonitoringAttributeStatistics(double lower_bound, double upper_bound){
        hard_lower_bound_is_set = true;
        hard_upper_bound_is_set = true;
        this.upper_bound = upper_bound;
        this.lower_bound = lower_bound;
    }

    public void update_attribute_statistics(double new_attribute_value){
        count++;

        double mean_differential = (new_attribute_value - current_mean) / count;
        double new_mean = current_mean + mean_differential;

        double dsquared_increment = (new_attribute_value - new_mean) * (new_attribute_value - current_mean);
        double new_dsquared = current_dsquared + dsquared_increment;

        current_mean = new_mean;
        current_dsquared = new_dsquared;

        if (!hard_upper_bound_is_set){
            if (count==1) {
                upper_bound = new_attribute_value;
            }else {
                double candidate_upper_value = new_mean + Math.sqrt(10.0) * Math.sqrt(new_dsquared / (count - 1)); //Chebyshev-based 90th percentile value
                //if (candidate_upper_value>upper_bound){
                upper_bound = candidate_upper_value;
                //}
            }
        }
        if (!hard_lower_bound_is_set) {
            if (count==1){
                lower_bound = new_attribute_value;
            }else {
                double candidate_lower_value = new_mean - Math.sqrt(10.0) * Math.sqrt(new_dsquared / (count - 1)); //Chebyshev-based 90th percentile value
                //if (candidate_lower_value < lower_bound) {
                lower_bound = candidate_lower_value;
                //}
            }
        }
    }

    public double getUpper_bound() {
        return upper_bound;
    }

    public void setUpper_bound(double upper_bound) {
        this.upper_bound = upper_bound;
    }

    public double getLower_bound() {
        return lower_bound;
    }

    public void setLower_bound(double lower_bound) {
        this.lower_bound = lower_bound;
    }

    @Override
    public String toString(){
        return "Upper bound "+ upper_bound + System.lineSeparator()+
                "Lower bound "+ lower_bound + System.lineSeparator()+
                "Count "+ count + System.lineSeparator() +
                "Mean "+ current_mean + System.lineSeparator() +
                "Dsquared " + current_dsquared;
    }
}
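A minimal usage sketch of the streaming statistics above, assuming the class is on the classpath as added in this commit (the sample values are arbitrary). With no hard bounds set, the upper and lower bounds are re-estimated on every update from the running mean and variance using the Chebyshev-style k = sqrt(10) multiplier seen above.

import utility_beans.MonitoringAttributeStatistics;

public class StatisticsUsageSketch {
    public static void main(String[] args) {
        // Unbounded statistics: bounds are estimated from the data stream itself.
        MonitoringAttributeStatistics cpu_statistics = new MonitoringAttributeStatistics();
        for (double value : new double[]{10.0, 12.0, 9.5, 11.0, 30.0}) {
            cpu_statistics.update_attribute_statistics(value);
        }
        System.out.println(cpu_statistics); // prints bounds, count, mean and Dsquared

        // Hard bounds: a metric known to range between 0 and 100 keeps fixed bounds.
        MonitoringAttributeStatistics bounded_statistics = new MonitoringAttributeStatistics(0, 100);
        bounded_statistics.update_attribute_statistics(42.0);
        System.out.println(bounded_statistics.getLower_bound() + " - " + bounded_statistics.getUpper_bound());
    }
}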
@ -0,0 +1,74 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import slo_processing.SLOSubRule;

import java.util.ArrayList;
import java.util.HashMap;

import static configuration.Constants.epsilon;
import static configuration.Constants.roc_limit;
import static utility_beans.PredictedMonitoringAttribute.*;
import static utility_beans.RealtimeMonitoringAttribute.*;

public class MonitoringAttributeUtilities {

    public static boolean isZero(double number){
        return ((number <= epsilon) && (number>= -epsilon));
    }

    public static void initialize_values(String monitoring_metric_name, Double monitoring_attribute_value) throws Exception {
        if (monitoring_attribute_value == null) {
            throw new Exception("Empty input of previous metric values for metric " + monitoring_metric_name);
        }

        if (getMonitoring_attributes().get(monitoring_metric_name) != null) {
            getMonitoring_attributes().remove(monitoring_metric_name);
        }
        ArrayList<SLOSubRule> subrules_related_to_monitoring_attribute = SLOSubRule.getSlo_subrules_per_monitoring_attribute().get(monitoring_metric_name);
        for (SLOSubRule subrule : subrules_related_to_monitoring_attribute) {
            getPredicted_monitoring_attributes().remove(subrule.getId());
        }

        getMonitoring_attributes().put(monitoring_metric_name, new RealtimeMonitoringAttribute(monitoring_metric_name, monitoring_attribute_value));

    }

    public static void initialize_values(String monitoring_metric_name){
        //First remove any pre-existing data then add new data
        if (getMonitoring_attributes().get(monitoring_metric_name) != null) {
            getMonitoring_attributes().remove(monitoring_metric_name);
        }

        getMonitoring_attributes().put(monitoring_metric_name, new RealtimeMonitoringAttribute(monitoring_metric_name));

        getMonitoring_attributes_roc_statistics().put(monitoring_metric_name,new MonitoringAttributeStatistics()); //The rate of change of a metric, is a metric which itself should be monitored for its upper bound

        if (!get_initial_upper_bound(monitoring_metric_name).equals(Double.NEGATIVE_INFINITY) &&
                !get_initial_lower_bound(monitoring_metric_name).equals(Double.POSITIVE_INFINITY)) {

            getMonitoring_attributes_statistics().put(monitoring_metric_name,new MonitoringAttributeStatistics(get_initial_lower_bound(monitoring_metric_name), get_initial_upper_bound(monitoring_metric_name)));

        }else if (!get_initial_upper_bound(monitoring_metric_name).equals(Double.NEGATIVE_INFINITY)){
            getMonitoring_attributes_statistics().put(monitoring_metric_name,new MonitoringAttributeStatistics(get_initial_upper_bound(monitoring_metric_name),true));
        }else if (!get_initial_lower_bound(monitoring_metric_name).equals(Double.POSITIVE_INFINITY)){
            getMonitoring_attributes_statistics().put(monitoring_metric_name,new MonitoringAttributeStatistics(get_initial_lower_bound(monitoring_metric_name),false));
        }else {
            getMonitoring_attributes_statistics().put(monitoring_metric_name,new MonitoringAttributeStatistics());
        }

        getAttributes_maximum_rate_of_change().put(monitoring_metric_name,roc_limit);
        getAttributes_minimum_rate_of_change().put(monitoring_metric_name,-roc_limit);

    }

}
@ -0,0 +1,299 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import java.util.*;
import java.util.logging.Level;
import java.util.logging.Logger;

import static configuration.Constants.*;
import static utility_beans.MonitoringAttributeUtilities.isZero;
import static utility_beans.RealtimeMonitoringAttribute.*;

public class PredictedMonitoringAttribute {

    private static HashMap <String,Double> attributes_maximum_rate_of_change = new HashMap<>(); //initialization
    private static HashMap <String, Double> attributes_minimum_rate_of_change = new HashMap<>();
    private static HashMap <String, Double> predicted_monitoring_attribute_values = new HashMap<>();
    private static HashMap <Integer,HashMap<Long,PredictedMonitoringAttribute>> predicted_monitoring_attributes = new HashMap<>();

    private String name;
    private boolean initialized = false;
    private double delta_for_greater_than_rule;
    private double delta_for_less_than_rule;
    private double threshold;
    private double rate_of_change_for_greater_than_rule; // the rate of change for the metric
    private double rate_of_change_for_less_than_rule; // the rate of change for the metric
    private double probability_confidence; //the probability confidence for the prediction
    private double confidence_interval_width;
    private long timestamp;
    private long targeted_prediction_time;

    public PredictedMonitoringAttribute(String name){
        this.name = name;
    }
    public PredictedMonitoringAttribute(String name, double threshold, int associated_subrule_id, Double forecasted_value, double probability_confidence, double confidence_interval_width,long timestamp, long targeted_prediction_time){

        //Below, it is assumed that the maximum of an attribute is 100, and the minimum of an attribute is 0
        this.initialized = true;
        this.name = name;
        this.threshold = threshold;
        double current_value = RealtimeMonitoringAttribute.get_metric_value(name);
        if (Double.isNaN(current_value)){
            Logger.getAnonymousLogger().log(info_logging_level,"Detected NaN value for metric "+name+". Thus we cannot compute severity although a predicted value of "+forecasted_value+" has arrived");
            this.initialized = false;
            return;
        }
        this.rate_of_change_for_greater_than_rule = getRateOfChange(forecasted_value, current_value,name);
        this.rate_of_change_for_less_than_rule = -this.rate_of_change_for_greater_than_rule; //inversion necessary, as when a rate of change is positive, it means that the metric is increasing and thus not directed towards the interval in which a less-than rule is fired.

        //Calculations for greater_than rule delta metric

        if(getMonitoring_attributes_statistics().get(name).getUpper_bound()>threshold){
            this.delta_for_greater_than_rule = 100*(forecasted_value - threshold)/(getMonitoring_attributes_statistics().get(name).getUpper_bound()-threshold);
        }else /*if (getMonitoring_attributes_statistics().get(name).getUpper_bound()<=threshold)*/{
            if (forecasted_value>threshold){
                this.delta_for_greater_than_rule = 100;
            }else if (forecasted_value==threshold){
                this.delta_for_greater_than_rule = 0;
            }else{
                this.delta_for_greater_than_rule = -100;
            }
        }
        this.delta_for_greater_than_rule = Math.min(Math.max(this.delta_for_greater_than_rule,-100),100);
        //this.previous_delta = 100*Math.abs(current_value - threshold)/(getMonitoring_attributes_statistics().get(name).getUpper_bound()-threshold);

        //Calculations for less_than rule delta metric

        if(threshold>getMonitoring_attributes_statistics().get(name).getLower_bound()) {

            this.delta_for_less_than_rule = 100 * (threshold - forecasted_value) / (threshold - getMonitoring_attributes_statistics().get(name).getLower_bound());

            //this.previous_delta = 100*Math.abs(current_value-threshold)/(threshold-getMonitoring_attributes_statistics().get(name).getLower_bound());
        }else{
            if (threshold>forecasted_value){
                this.delta_for_less_than_rule = 100;
            }else if (threshold==forecasted_value){
                this.delta_for_less_than_rule = 0;
            }else{
                this.delta_for_less_than_rule = -100;
            }
        }
        this.delta_for_less_than_rule = Math.min(Math.max(this.delta_for_less_than_rule,-100),100);
        //this.previous_delta = 100*Math.abs(current_value-threshold)/(threshold-getMonitoring_attributes_statistics().get(name).getLower_bound());

        this.probability_confidence = probability_confidence;
        this.confidence_interval_width = confidence_interval_width;
        //actual_metric_values = get_last_n_actual_values(Constants.elements_considered_in_prediction, MonitoringAttribute.get_monitoring_attributes_values_map().get(name),true);
        this.timestamp = timestamp;
        this.targeted_prediction_time = targeted_prediction_time;
    }

    public static HashMap<Integer, HashMap<Long, PredictedMonitoringAttribute>> getPredicted_monitoring_attributes() {
        return predicted_monitoring_attributes;
    }

    public static HashMap<String, Double> getAttributes_maximum_rate_of_change() {
        return attributes_maximum_rate_of_change;
    }

    public static HashMap<String, Double> getAttributes_minimum_rate_of_change() {
        return attributes_minimum_rate_of_change;
    }

    private double getRateOfChange(double forecasted_value, double actual_value, String name) {
        double maximum_rate_of_change = attributes_maximum_rate_of_change.get(name);
        double minimum_rate_of_change = attributes_minimum_rate_of_change.get(name);
        double rate_of_change,normalized_rate_of_change;

        if (roc_calculation_mode.equals("prototype")) {
            if (isZero(actual_value)) {
                if (isZero(forecasted_value)){
                    rate_of_change = 0;
                    normalized_rate_of_change = 0;
                }else {
                    rate_of_change = 1 * (forecasted_value/Math.abs(forecasted_value)) *roc_limit; //choosing maximum positive/negative value based on the sign of the forecasted value
                    normalized_rate_of_change = 100*Math.min(Math.max(rate_of_change, -1), 1);
                }
            } else if ( isZero(maximum_rate_of_change - minimum_rate_of_change)) {
                rate_of_change = 1 * Math.max(Math.min((forecasted_value - actual_value) / Math.abs(actual_value),roc_limit),-roc_limit);
                normalized_rate_of_change = 100*Math.min(Math.max(rate_of_change,-1),1);
            } else {
                rate_of_change = 1 * Math.max(Math.min(((forecasted_value - actual_value) / Math.abs(actual_value)),roc_limit),-roc_limit);
                if (forecasted_value>actual_value){
                    normalized_rate_of_change = 100*Math.min(Math.max(rate_of_change/Math.abs(maximum_rate_of_change),-1),1);
                }else{
                    normalized_rate_of_change = 100*Math.min(Math.max(rate_of_change/Math.abs(minimum_rate_of_change),-1),1);
                }

            }
        }
        else{
            Logger.getAnonymousLogger().log(severe_logging_level,"Effectively disabling rate of change (ROC) metric, setting it to 0, as an invalid roc_calculation_mode has been chosen");
            rate_of_change = 0;
            normalized_rate_of_change = 0;
        }
        String debug_rate_of_change_string = "The rate of change for metric "+name+", having a forecasted value of "+forecasted_value+", previous real value of "+actual_value + ", maximum rate of change equal to "+maximum_rate_of_change+", minimum rate of change equal to "+minimum_rate_of_change+", is "+(int)(rate_of_change*10000)/100.0+"% and the normalized rate of change is "+(int)(normalized_rate_of_change*100)/100.0 +"%";
        if(!debug_logging_level.equals(Level.OFF)) {
            Logger.getAnonymousLogger().log(debug_logging_level, debug_rate_of_change_string);
        }

        //Streaming percentile calculation, using non-normalized rate of change
        getMonitoring_attributes_roc_statistics().get(name).update_attribute_statistics(rate_of_change);

        if (attributes_maximum_rate_of_change.get(name)!=null) {
            attributes_maximum_rate_of_change.remove(name);
        }

        if (attributes_minimum_rate_of_change.get(name)!=null) {
            attributes_minimum_rate_of_change.remove(name);
        }

        attributes_maximum_rate_of_change.put(name,Math.min(getMonitoring_attributes_roc_statistics().get(name).getUpper_bound(),roc_limit));

        attributes_minimum_rate_of_change.put(name,Math.max(getMonitoring_attributes_roc_statistics().get(name).getLower_bound(),-roc_limit));

        if (Double.isNaN(getMonitoring_attributes_roc_statistics().get(name).getUpper_bound())){
            Logger.getAnonymousLogger().log(info_logging_level,"NaN value detected for maximum rate of change. The individual metric values are "+getMonitoring_attributes_roc_statistics().get(name).toString());
        }

        if (Double.isNaN(getMonitoring_attributes_roc_statistics().get(name).getLower_bound())){
            Logger.getAnonymousLogger().log(info_logging_level,"NaN value detected for minimum rate of change. The individual metric values are "+getMonitoring_attributes_roc_statistics().get(name).toString());
        }

        return Math.max(Math.min(normalized_rate_of_change,100.0),-100.0);
    }

    public double getNormalizedConfidenceIntervalWidth(){

        double normalized_interval;
        double maximum_metric_value = getMonitoring_attributes_statistics().get(name).getUpper_bound();
        double minimum_metric_value = getMonitoring_attributes_statistics().get(name).getLower_bound();

        if (Double.isInfinite(this.confidence_interval_width)){
            Logger.getAnonymousLogger().log(info_logging_level,"Since the confidence interval is deemed to be infinite, it will be set to 100 and the relevant probability confidence factor should be reduced to the lowest value");
            return 100;
        }
        if (isZero(maximum_metric_value-minimum_metric_value)){
            normalized_interval = 50; //Assuming an average case, from 0 to 100
        }else{
            normalized_interval = 100*this.confidence_interval_width/(maximum_metric_value-minimum_metric_value);
            double normalized_interval_sign = normalized_interval/Math.abs(normalized_interval);
            if (Math.abs(normalized_interval)>100){
                normalized_interval = 100*normalized_interval_sign;
                Logger.getAnonymousLogger().log(info_logging_level,"Due to the maximum and minimum metric values being estimated as "+maximum_metric_value+ " and "+minimum_metric_value+" respectively, and as the value of the confidence interval width is "+this.confidence_interval_width+" the absolute value of the normalized interval is limited to a value of "+normalized_interval);
            }
        }
        return normalized_interval;

    }

    private <T extends Number> ArrayList<T> get_last_n_actual_values(int n, ArrayList<T> values, boolean truncate_old_values){
        int arraylist_size = values.size();
        ArrayList<T> new_values = new ArrayList<>();
        if (truncate_old_values) {
            for (int i = 0; i < arraylist_size; i++) {
                if (i<(arraylist_size-n)) {
                    values.remove(0);
                }else{
                    new_values.add(values.get(i+n-arraylist_size));
                }
            }
        }
        return new_values;
    }

    public double getDelta_for_greater_than_rule() {
        return delta_for_greater_than_rule;
    }
    public double getDelta_for_less_than_rule() {
        return delta_for_less_than_rule;
    }

    public void setDelta_for_greater_than_rule(double delta) {
        this.delta_for_greater_than_rule = delta;
    }

    public void setDelta_for_less_than_rule(double delta) {
        this.delta_for_less_than_rule = delta;
    }
    public double getThreshold() {
        return threshold;
    }

    public void setThreshold(double threshold) {
        this.threshold = threshold;
    }

    public double getRate_of_change_for_greater_than_rule() {
        return rate_of_change_for_greater_than_rule;
    }
    public double getRate_of_change_for_less_than_rule() {
        return rate_of_change_for_less_than_rule;
    }

    public void setRate_of_change_for_greater_than_rule(double rate_of_change_for_greater_than_rule) {
        this.rate_of_change_for_greater_than_rule = rate_of_change_for_greater_than_rule;
    }

    public void setRate_of_change_for_less_than_rule(double rate_of_change_for_less_than_rule) {
        this.rate_of_change_for_less_than_rule = rate_of_change_for_less_than_rule;
    }

    public double getProbability_confidence() {
        return probability_confidence;
    }

    public void setProbability_confidence(double probability_confidence) {
        this.probability_confidence = probability_confidence;
    }

    public double getConfidence_interval_width() {
        return confidence_interval_width;
    }

    public void setConfidence_interval_width(double confidence_interval_width) {
        this.confidence_interval_width = confidence_interval_width;
    }
    public String getName() {
        return name;
    }

    public static HashMap<String, Double> getPredicted_monitoring_attribute_values() {
        return predicted_monitoring_attribute_values;
    }

    public boolean isInitialized() {
        return initialized;
    }

    public void setInitialized(boolean initialized) {
        this.initialized = initialized;
    }

    public long getTimestamp() {
        return timestamp;
    }

    public void setTimestamp(long timestamp) {
        this.timestamp = timestamp;
    }

    @Override
    public String toString(){
        String output = "";
        output="{"+name+" deltas: "+delta_for_greater_than_rule+","+delta_for_less_than_rule+" ROCs: "+rate_of_change_for_greater_than_rule+","+rate_of_change_for_less_than_rule+" PrConf:"+probability_confidence+" Confidence Interval: "+confidence_interval_width+" Prediction Timestamp: "+timestamp+"}";
        return output;
    }

}
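To make the delta normalisation in the constructor above concrete, a small worked example with illustrative numbers (not taken from a real run): with an estimated upper bound of 100, a threshold of 70 and a forecasted value of 85, the forecast sits halfway between the threshold and the worst expected value.

// Worked example of the greater-than delta normalisation performed above (illustrative values only).
public class DeltaNormalisationSketch {
    public static void main(String[] args) {
        double upper_bound = 100, threshold = 70, forecasted_value = 85;
        double delta_for_greater_than_rule = 100 * (forecasted_value - threshold) / (upper_bound - threshold);
        delta_for_greater_than_rule = Math.min(Math.max(delta_for_greater_than_rule, -100), 100);
        System.out.println(delta_for_greater_than_rule); // 50.0: halfway between threshold and upper bound
    }
}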
@ -0,0 +1,29 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import java.util.ArrayList;
import java.util.Collection;

public class PredictionAttributeSet {
    private static ArrayList<PredictedMonitoringAttribute> prediction_attributes = new ArrayList<>();

    public PredictionAttributeSet(Collection<PredictedMonitoringAttribute> prediction_attributes){
        this.prediction_attributes.addAll(prediction_attributes);
    }

    public void addPredictionAttribute(PredictedMonitoringAttribute predictionAttribute){
        prediction_attributes.add(predictionAttribute);
    }

    public static ArrayList<PredictedMonitoringAttribute> getPredictionAttributes(){
        return prediction_attributes;
    }

}
@ -0,0 +1,204 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import org.apache.commons.collections4.queue.CircularFifoQueue;
import utilities.MathUtils;

import java.util.Collection;
import java.util.HashMap;
import java.util.logging.Logger;

import static configuration.Constants.*;
import static utility_beans.PredictedMonitoringAttribute.*;

public class RealtimeMonitoringAttribute {
    private static HashMap<String,String> monitoring_attributes_bounds_representation = new HashMap<>();
    //private static HashMap<String, Double> monitoring_attributes_min_values = new HashMap<>();
    //private static HashMap<String, Double> monitoring_attributes_max_values = new HashMap<>();
    private static HashMap<String, MonitoringAttributeStatistics> monitoring_attributes_statistics = new HashMap<>();
    private static HashMap<String, MonitoringAttributeStatistics> monitoring_attributes_roc_statistics = new HashMap<>();
    private static HashMap<String, RealtimeMonitoringAttribute> monitoring_attributes = new HashMap<>();
    private CircularFifoQueue<Double> actual_metric_values = new CircularFifoQueue<Double>(kept_values_per_metric); //the previous actual values of the metric
    protected String name;

    public RealtimeMonitoringAttribute(String name, Collection<Double> values){
        this.name = name;
        values.stream().forEach(x -> actual_metric_values.add(x));
    }
    public RealtimeMonitoringAttribute(String name, Double value){
        this.name = name;
        actual_metric_values.add(value);
    }
    public RealtimeMonitoringAttribute(String name){
        this.name = name;
    }

    public static Double get_metric_value(String metric_name){
        CircularFifoQueue<Double> actual_metric_values = monitoring_attributes.get(metric_name).getActual_metric_values();
        if (actual_metric_values.size()==0){
            Logger.getAnonymousLogger().log(warning_logging_level,"Trying to retrieve realtime values from an empty queue for metric "+metric_name);
        }
        return aggregate_metric_values(actual_metric_values);
    }

    private static Double aggregate_metric_values(Iterable<Double> metric_values) {
        return MathUtils.get_average(metric_values);
    }

    public static void update_monitoring_attribute_value(String name,Double value){
        if(monitoring_attributes.get(name)==null){
            monitoring_attributes.put(name,new RealtimeMonitoringAttribute(name));
            //monitoring_attributes_max_values.put(name,value);
            //monitoring_attributes_min_values.put(name,value);

        }
        monitoring_attributes.get(name).getActual_metric_values().add(value);
        getMonitoring_attributes_statistics().get(name).update_attribute_statistics(value);
        /*
        if(get_90th_percentile_high_value(name,value)>monitoring_attributes_max_values.get(name)){
            monitoring_attributes_max_values.put(name,value);
        }else if (get_90th_percentile_low_value(name,value)<monitoring_attributes_min_values.get(name)){
            monitoring_attributes_min_values.put(name,value);
        }
        */
    }

    public static <T extends Iterable<String>> void initialize_monitoring_attribute_rates_of_change(T metric_names){
        initialize_monitoring_attribute_hashmap(monitoring_attributes,metric_names);
        initialize_attribute_value_hashmap(getAttributes_maximum_rate_of_change(),metric_names);
        initialize_attribute_value_hashmap(getAttributes_minimum_rate_of_change(),metric_names);
    }

    public static <T extends Iterable<String>> void initialize_monitoring_attribute_hashmap(HashMap<String, RealtimeMonitoringAttribute> map, T metric_names){
        for (String metric_name : metric_names){
            map.put(metric_name,new RealtimeMonitoringAttribute(metric_name));
        }
    }

    public static <T extends Iterable<String>> void simple_initialize_0_100_bounded_attributes(T metric_names){
        for (String metric_name : metric_names) {
            getMonitoring_attributes_statistics().put(metric_name, new MonitoringAttributeStatistics(0,100));
        }
    }

    /*
    public static <T extends Iterable<String>> void initialize_monitoring_attribute_min_values(T metric_names){
        initialize_attribute_value_hashmap(monitoring_attributes_min_values,metric_names);
    }

    public static <T extends Iterable<String>> void initialize_monitoring_attribute_max_values(T metric_names){
        initialize_attribute_value_hashmap(monitoring_attributes_max_values,metric_names);
    }
    */
    private static <T extends Iterable<String>> void initialize_attribute_value_hashmap(HashMap<String,Double> hashmap ,T metric_names){
        for (String metric_name: metric_names){
            hashmap.put(metric_name,0.0);
        }
    }

    /*
    public static HashMap<String, Double> getMonitoring_attributes_min_values() {
        return monitoring_attributes_min_values;
    }

    public static void setMonitoring_attributes_min_values(HashMap<String, Double> monitoring_attributes_min_values) {
        RealtimeMonitoringAttribute.monitoring_attributes_min_values = monitoring_attributes_min_values;
    }

    public static HashMap<String, Double> getMonitoring_attributes_max_values() {
        return monitoring_attributes_max_values;
    }

    public static void setMonitoring_attributes_max_values(HashMap<String, Double> monitoring_attributes_max_values) {
        RealtimeMonitoringAttribute.monitoring_attributes_max_values = monitoring_attributes_max_values;
    }
    */
    public static void update_monitoring_attributes_values_map(HashMap<String, Double> input_data) {
        for (HashMap.Entry<String,Double> entry: input_data.entrySet()){
            update_monitoring_attribute_value(entry.getKey(),entry.getValue());
        }
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getName(){
        return name;
    }

    public CircularFifoQueue<Double> getActual_metric_values() {
        return actual_metric_values;
    }

    public void setActual_metric_values(CircularFifoQueue<Double> actual_metric_values) {
        this.actual_metric_values = actual_metric_values;
    }

    public static HashMap<String, RealtimeMonitoringAttribute> getMonitoring_attributes() {
        return monitoring_attributes;
    }

    public static void setMonitoring_attributes(HashMap<String, RealtimeMonitoringAttribute> monitoring_attributes) {
        RealtimeMonitoringAttribute.monitoring_attributes = monitoring_attributes;
    }

    public static HashMap<String, MonitoringAttributeStatistics> getMonitoring_attributes_statistics() {
        return monitoring_attributes_statistics;
    }

    public static void setMonitoring_attributes_statistics(HashMap<String, MonitoringAttributeStatistics> monitoring_attributes_statistics) {
        RealtimeMonitoringAttribute.monitoring_attributes_statistics = monitoring_attributes_statistics;
    }

    public static HashMap<String, MonitoringAttributeStatistics> getMonitoring_attributes_roc_statistics() {
        return monitoring_attributes_roc_statistics;
    }

    public static void setMonitoring_attributes_roc_statistics(HashMap<String, MonitoringAttributeStatistics> monitoring_attributes_roc_statistics) {
        RealtimeMonitoringAttribute.monitoring_attributes_roc_statistics = monitoring_attributes_roc_statistics;
    }

    public static Double get_initial_upper_bound(String attribute_name){

        if (monitoring_attributes_bounds_representation.get(attribute_name)==null) {
            return 100.0;
        }
        if (monitoring_attributes_bounds_representation.get(attribute_name).split(";")[1].equals("unbounded")){
            return Double.NEGATIVE_INFINITY;
        }else{
            return Double.parseDouble(monitoring_attributes_bounds_representation.get(attribute_name).split(";")[1]);
        }
    }
    public static Double get_initial_lower_bound(String attribute_name){

        if (monitoring_attributes_bounds_representation.get(attribute_name)==null) {
            return 0.0;
        }
        if (monitoring_attributes_bounds_representation.get(attribute_name).split(";")[0].equals("unbounded")){
            return Double.POSITIVE_INFINITY;
        }else{
            return Double.parseDouble(monitoring_attributes_bounds_representation.get(attribute_name).split(";")[0]);
        }
    }

    public static HashMap<String, String> getMonitoring_attributes_bounds_representation() {
        return monitoring_attributes_bounds_representation;
    }

    public static void setMonitoring_attributes_bounds_representation(HashMap<String, String> monitoring_attributes_bounds_representation) {
        RealtimeMonitoringAttribute.monitoring_attributes_bounds_representation = monitoring_attributes_bounds_representation;
    }

}
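The bounds representation consumed by get_initial_lower_bound and get_initial_upper_bound above is a per-metric "lower;upper" string in which either side may be the literal word unbounded. A short sketch, assuming the class is on the classpath as added in this commit (metric names and values are illustrative):

import utility_beans.RealtimeMonitoringAttribute;

public class BoundsRepresentationSketch {
    public static void main(String[] args) {
        // "lower;upper" per metric; "unbounded" disables the corresponding hard bound.
        RealtimeMonitoringAttribute.getMonitoring_attributes_bounds_representation().put("custom2", "0;3");
        RealtimeMonitoringAttribute.getMonitoring_attributes_bounds_representation().put("avgResponseTime", "unbounded;unbounded");

        System.out.println(RealtimeMonitoringAttribute.get_initial_lower_bound("custom2"));         // 0.0
        System.out.println(RealtimeMonitoringAttribute.get_initial_upper_bound("custom2"));         // 3.0
        System.out.println(RealtimeMonitoringAttribute.get_initial_upper_bound("avgResponseTime")); // -Infinity, i.e. no hard upper bound
        System.out.println(RealtimeMonitoringAttribute.get_initial_lower_bound("avgResponseTime")); // Infinity, i.e. no hard lower bound
    }
}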
@ -0,0 +1,27 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

public class SynchronizedBoolean {
    private Boolean value;
    public SynchronizedBoolean(Boolean value){
        this.value = value;
    }
    public SynchronizedBoolean(){
        this(false);
    }

    public Boolean getValue() {
        return value;
    }

    public void setValue(Boolean value) {
        this.value = value;
    }
}
@ -0,0 +1,29 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import utility_beans.SynchronizedBoolean;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

public class SynchronizedBooleanMap {
    private Map<String, SynchronizedBoolean> synchronized_map = Collections.synchronizedMap(new HashMap<>()); // using Collections.synchronized map as we intend to add/remove topics to the map dynamically
    public SynchronizedBoolean get_synchronized_boolean(String name){
        if (synchronized_map.containsKey(name)) {
            return synchronized_map.get(name);
        }else{
            synchronized_map.put(name,new SynchronizedBoolean(false));
            return synchronized_map.get(name);
        }
    }
}
@ -0,0 +1,33 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

package utility_beans;

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;

import static configuration.Constants.EMPTY;

public class SynchronizedStringMap {
    private Map<String, String> synchronized_map = Collections.synchronizedMap(new HashMap<>()); // using Collections.synchronized map as we intend to add/remove topics to the map dynamically
    public String get_synchronized_contents(String name){
        if (synchronized_map.containsKey(name)) {
            return synchronized_map.get(name);
        }else{
            synchronized_map.put(name,new String(EMPTY));
            return synchronized_map.get(name);
        }
    }
    public String assign_value(String topic, String value){
        synchronized_map.put(topic,value);
        return synchronized_map.get(topic);
    }
}
@ -0,0 +1,18 @@
#
# Copyright (C) 2017-2019 Institute of Communication and Computer Systems (imu.iccs.gr)
#
# This Source Code Form is subject to the terms of the Mozilla Public License, v2.0, unless
# Esper library is used, in which case it is subject to the terms of General Public License v2.0.
# If a copy of the MPL was not distributed with this file, you can obtain one at
# https://www.mozilla.org/en-US/MPL/2.0/
#

# Broker Client settings
brokerclient.broker-url=tcp://localhost:61616
brokerclient.broker-url-properties=transport.daemon=true&transport.trace=false&transport.useKeepAlive=true&transport.useInactivityMonitor=false&transport.needClientAuth=${brokerclient.ssl.client-auth.required}&transport.verifyHostName=false
brokerclient.ssl.client-auth.required=false
brokerclient.preserve-connection=false

# Broker authentication
brokerclient.broker-username=
brokerclient.broker-password=
@ -0,0 +1,14 @@
self_publish_rule_file = false

metrics_bounds = avgResponseTime;unbounded;unbounded,custom2;0;3

slo_rules_topic = metric.metric_list
single_slo_rule_active = true
broker_ip_url = tcp://localhost:61616?wireFormat.maxInactivityDuration=0
broker_username = morphemic
broker_password = morphemic

slo_violation_probability_threshold = 0.1
slo_violation_determination_method = prconf-delta
time_horizon_seconds = 120
maximum_acceptable_forward_predictions = 30
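The detector configuration above is a plain java.util.Properties file; a minimal loading sketch, mirroring how ConnectivityTests further below reads its properties (the file path here is a placeholder, not the project's real path):

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

public class ConfigLoadingSketch {
    public static void main(String[] args) throws IOException {
        Properties prop = new Properties();
        // Placeholder path; the tests resolve the real location from configuration_file_location.
        try (FileInputStream input = new FileInputStream("path/to/slo_detector.properties")) {
            prop.load(input);
        }
        double slo_violation_probability_threshold =
                Double.parseDouble(prop.getProperty("slo_violation_probability_threshold")); // 0.1 in the file above
        int time_horizon_seconds = Integer.parseInt(prop.getProperty("time_horizon_seconds")); // 120 in the file above
        System.out.println(slo_violation_probability_threshold + " / " + time_horizon_seconds);
    }
}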
24
slo-violation-detector/src/main/resources/test.json
Normal file
@ -0,0 +1,24 @@
{
    "and-1": {
        "attribute": "cpu",
        "operator": ">",
        "value": "80"
    },
    "and-2":{
        "attribute": "ram",
        "operator": ">",
        "value": "70"
    },
    "and-3":{
        "and-3.1":{
            "attribute": "bandwidth",
            "operator": ">",
            "value": "70"
        },
        "or-3.2":{
            "attribute": "disk",
            "operator": ">",
            "value": "90"
        }
    }
}
34
slo-violation-detector/src/main/resources/test_v2.json
Normal file
@ -0,0 +1,34 @@
{
    "id":"and-0",
    "and-0":[
        {
            "id":"1",
            "attribute": "cpu",
            "operator": ">",
            "threshold": "80"
        },
        {
            "id":"2",
            "attribute": "ram",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id":"or-3",
            "or-3":[
                {
                    "id":"4",
                    "attribute": "bandwidth",
                    "operator": ">",
                    "threshold": "70"
                },
                {
                    "id":"5",
                    "attribute": "disk",
                    "operator": ">",
                    "threshold": "90"
                }
            ]
        }
    ]
}
@ -0,0 +1,41 @@
{
    "id": "and-0",
    "and-0": [
        {
            "id": "1",
            "attribute": "cpu",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "2",
            "attribute": "ram",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "3",
            "attribute": "disk",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "4",
            "attribute": "bandwidth",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "5",
            "attribute": "custom1",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "6",
            "attribute": "custom2",
            "operator": ">",
            "threshold": "70"
        }
    ]
}
@ -0,0 +1,6 @@
{
    "id":"1",
    "attribute": "cpu",
    "operator": ">",
    "threshold": "70"
}
@ -0,0 +1,17 @@
{
    "id": "and-0",
    "and-0": [
        {
            "id": "1",
            "attribute": "cpu",
            "operator": ">",
            "threshold": "80"
        },
        {
            "id": "2",
            "attribute": "ram",
            "operator": ">",
            "threshold": "70"
        }
    ]
}
@ -0,0 +1,28 @@
{
    "id": "and-0",
    "and-0": [
        {
            "id": "1",
            "attribute": "cpu_usage",
            "operator": ">",
            "threshold": "70"
        },
        {
            "id": "or-2",
            "or-2": [
                {
                    "id": "3",
                    "attribute": "memory",
                    "operator": ">",
                    "threshold": "70"
                },
                {
                    "id": "4",
                    "attribute": "disk",
                    "operator": ">",
                    "threshold": "95"
                }
            ]
        }
    ]
}
@ -0,0 +1,6 @@
{
    "id":"1",
    "attribute": "custom_metric_1",
    "operator": ">=",
    "threshold": "1"
}
@ -0,0 +1,36 @@
{
    "name": "_",
    "operator":"OR",
    "constraints":[
        {
            "name":"cpu_and_memory_or_disk_too_high",
            "operator":"AND",
            "constraints": [
                {
                    "name":"cpu_usage_high",
                    "metric":"cpu_usage",
                    "operator":">",
                    "threshold":70.0
                },
                {
                    "name": "memory_or_disk_usage_high",
                    "operator": "OR",
                    "constraints": [
                        {
                            "name":"memory_usage_high",
                            "metric":"memory",
                            "operator":">",
                            "threshold":70.0
                        },
                        {
                            "name": "disk_usage_high",
                            "metric":"disk",
                            "operator":">",
                            "threshold":95.0
                        }
                    ]
                }
            ]
        }
    ]
}
@ -0,0 +1,12 @@
{
    "name": "_",
    "operator":"OR",
    "constraints":[
        {
            "name":"custom_metric_1_too_high",
            "metric": "custom_metric_1",
            "operator":"\u003e",
            "threshold": 1.0
        }
    ]
}
89
slo-violation-detector/src/test/java/ConnectivityTests.java
Normal file
@ -0,0 +1,89 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

import eu.melodic.event.brokerclient.BrokerPublisher;
import eu.melodic.event.brokerclient.BrokerSubscriber;
import org.apache.commons.lang3.mutable.MutableBoolean;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.Test;

import javax.jms.JMSException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
import java.util.logging.Logger;

import static configuration.Constants.*;
import static runtime.Main.running_threads;


public class ConnectivityTests {

    private Boolean connectivity_result_success;
    @Test
    public void test_broker_connectivity() throws IOException {

        Properties prop = new Properties();

        URI absolute_configuration_file_path = new File(configuration_file_location).toURI();
        base_project_path = new File("").toURI();
        URI relative_path = base_project_path.relativize(absolute_configuration_file_path);

        InputStream inputStream = new FileInputStream(base_project_path.getPath()+relative_path.getPath());

        prop.load(inputStream);

        BrokerPublisher publisher = new BrokerPublisher("test_topic",prop.getProperty("broker_ip_url"),prop.getProperty("broker_username"),prop.getProperty("broker_password"), amq_library_configuration_location);

        BrokerSubscriber subscriber = new BrokerSubscriber("test_topic",prop.getProperty("broker_ip_url"),prop.getProperty("broker_username"),prop.getProperty("broker_password"),amq_library_configuration_location);

        JSONObject object_to_publish = new JSONObject();
        object_to_publish.put("ram","95");
        object_to_publish.put("cpu","99");

        BiFunction<String,String,String> slo_function = (topic,message)->{
            Double cpu_slo_limit = 70.0;
            Double ram_slo_limit = 60.0;
            Boolean return_value = false;
            try {
                Logger.getAnonymousLogger().log(info_logging_level,"Received " + message);
                JSONObject rules_json = (JSONObject) new JSONParser().parse(message);
                Double ram_value = Double.parseDouble(rules_json.get("ram").toString());
                Double cpu_value = Double.parseDouble(rules_json.get("cpu").toString());
                return_value = (ram_value>ram_slo_limit && cpu_value>cpu_slo_limit);
            } catch (ParseException e) {
                e.printStackTrace();
            }
            connectivity_result_success = return_value;
            return return_value.toString();
        };

        Thread subscription_thread = new Thread(() -> {
            subscriber.subscribe(slo_function,new AtomicBoolean(false)); //will be a short-lived test, so setting stop signal to false
        });
        subscription_thread.start();
        running_threads.put("Test topic subscription thread",subscription_thread);

        publisher.publish(object_to_publish.toJSONString());
        try {
            Thread.sleep(2000);
        }catch (InterruptedException i){
            i.printStackTrace();
        }
        assert connectivity_result_success;

    }

}
37
slo-violation-detector/src/test/java/DerivedMonitoringAttributeTests.java
Normal file
@ -0,0 +1,37 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

import org.junit.Test;
import utility_beans.MonitoringAttributeStatistics;
import utility_beans.MonitoringAttributeUtilities;
import utility_beans.RealtimeMonitoringAttribute;
import utility_beans.PredictedMonitoringAttribute;

import java.util.Arrays;

import static configuration.Constants.roc_limit;
import static utility_beans.PredictedMonitoringAttribute.getAttributes_maximum_rate_of_change;
import static utility_beans.PredictedMonitoringAttribute.getAttributes_minimum_rate_of_change;
import static utility_beans.RealtimeMonitoringAttribute.getMonitoring_attributes_roc_statistics;

public class DerivedMonitoringAttributeTests {
    @Test
    public void roc_calculation_test(){

        RealtimeMonitoringAttribute.simple_initialize_0_100_bounded_attributes(Arrays.asList(new String[]{"cpu"}));
        RealtimeMonitoringAttribute.update_monitoring_attribute_value("cpu",0.0);
        getMonitoring_attributes_roc_statistics().put("cpu", new MonitoringAttributeStatistics()); //The rate of change of a metric, is a metric which itself should be monitored for its upper bound

        getAttributes_maximum_rate_of_change().put("cpu",roc_limit);
        getAttributes_minimum_rate_of_change().put("cpu",-roc_limit);

        PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute("cpu",70,1,100.0,100,1,System.currentTimeMillis(),System.currentTimeMillis()+20000);

        assert prediction_attribute.getRate_of_change_for_greater_than_rule() == 100.0;
    }
}
103
slo-violation-detector/src/test/java/SeverityTests.java
Normal file
@ -0,0 +1,103 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

import org.junit.Test;
import slo_processing.SLOSubRule;
import utilities.SLOViolationCalculator;
import utility_beans.MonitoringAttributeStatistics;
import utility_beans.RealtimeMonitoringAttribute;
import utility_beans.PredictedMonitoringAttribute;

import java.util.ArrayList;

import static utility_beans.RealtimeMonitoringAttribute.getMonitoring_attributes_roc_statistics;

public class SeverityTests {
    @Test
    public void all_metrics_Severity_test_1(){

        ArrayList<String> metric_names = new ArrayList<>(){{
            add("cpu");
        }};

        RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_min_values(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_max_values(metric_names);
        RealtimeMonitoringAttribute.simple_initialize_0_100_bounded_attributes(metric_names);
        for(String monitoring_metric_name : metric_names) {
            getMonitoring_attributes_roc_statistics().put(monitoring_metric_name, new MonitoringAttributeStatistics()); //The rate of change of a metric, is a metric which itself should be monitored for its upper bound
        }
        RealtimeMonitoringAttribute.update_monitoring_attribute_value("cpu",0.0);

        PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute("cpu",70,1,100.0,100,10,System.currentTimeMillis(),System.currentTimeMillis()+20000);

        assert (prediction_attribute.getRate_of_change_for_greater_than_rule() < 100.0000000001 && prediction_attribute.getRate_of_change_for_greater_than_rule()>99.9999999999); //maximum value
        assert (prediction_attribute.getConfidence_interval_width() <10.000000000001 && prediction_attribute.getConfidence_interval_width()>9.9999999999);
        assert(prediction_attribute.getProbability_confidence()<100.0000000001 && prediction_attribute.getProbability_confidence()>99.99999999);
        assert(prediction_attribute.getDelta_for_greater_than_rule()<100.0000000001 && prediction_attribute.getDelta_for_greater_than_rule()>99.99999999);
        assert Math.floor(SLOViolationCalculator.get_Severity_all_metrics_method(prediction_attribute, SLOSubRule.RuleType.greater_than_rule)*100) == 9678;
    }

    @Test
    public void all_metrics_Severity_test_2(){

        ArrayList<String> metric_names = new ArrayList<>(){{
            add("cpu");
        }};

        RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_min_values(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_max_values(metric_names);
        RealtimeMonitoringAttribute.simple_initialize_0_100_bounded_attributes(metric_names);
        for(String monitoring_metric_name : metric_names) {
            getMonitoring_attributes_roc_statistics().put(monitoring_metric_name, new MonitoringAttributeStatistics()); //The rate of change of a metric, is a metric which itself should be monitored for its upper bound
        }
        RealtimeMonitoringAttribute.update_monitoring_attribute_value("cpu",30.0);

        PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute("cpu",70,1,80.0,90,5,System.currentTimeMillis(),System.currentTimeMillis()+20000);

        assert (prediction_attribute.getRate_of_change_for_greater_than_rule() > 99.99999999 && prediction_attribute.getRate_of_change_for_greater_than_rule()< 100.00000001); //zero value
        assert (prediction_attribute.getConfidence_interval_width() <5.000000000001 && prediction_attribute.getConfidence_interval_width()>4.9999999999);
        assert (prediction_attribute.getProbability_confidence()<90.0000000001 && prediction_attribute.getProbability_confidence()>89.99999999);
        assert (prediction_attribute.getDelta_for_greater_than_rule()<33.3333333334 && prediction_attribute.getDelta_for_greater_than_rule()>33.3333333332);
        assert Math.floor(SLOViolationCalculator.get_Severity_all_metrics_method(prediction_attribute, SLOSubRule.RuleType.greater_than_rule)*100) == 7836;
    }

    @Test
    public void all_metrics_Severity_test_3(){

        ArrayList<String> metric_names = new ArrayList<>(){{
            add("cpu");
        }};

        RealtimeMonitoringAttribute.initialize_monitoring_attribute_rates_of_change(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_min_values(metric_names);
        //RealtimeMonitoringAttribute.initialize_monitoring_attribute_max_values(metric_names);
        RealtimeMonitoringAttribute.simple_initialize_0_100_bounded_attributes(metric_names);
        for(String monitoring_metric_name : metric_names) {
            getMonitoring_attributes_roc_statistics().put(monitoring_metric_name, new MonitoringAttributeStatistics()); //The rate of change of a metric, is a metric which itself should be monitored for its upper bound
        }
        RealtimeMonitoringAttribute.update_monitoring_attribute_value("cpu",86.0);

        PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute("cpu",75,1,92.0,88,7.8,System.currentTimeMillis(),System.currentTimeMillis()+20000);

        assert (prediction_attribute.getRate_of_change_for_greater_than_rule()> 6.97674418604 && prediction_attribute.getRate_of_change_for_greater_than_rule()< 6.97674418605 ); //zero value
        assert (prediction_attribute.getConfidence_interval_width() >7.7999999 && prediction_attribute.getConfidence_interval_width()<7.8000001);
        assert (prediction_attribute.getProbability_confidence()<88.0000000001 && prediction_attribute.getProbability_confidence()>87.99999999);
        assert (prediction_attribute.getDelta_for_greater_than_rule()<68.0000000001 && prediction_attribute.getDelta_for_greater_than_rule()>67.99999999);
        assert Math.floor(SLOViolationCalculator.get_Severity_all_metrics_method(prediction_attribute, SLOSubRule.RuleType.greater_than_rule)*100) == 6125;
    }



    @Test
    public void prconf_delta_Severity_test(){

    }

}
280
slo-violation-detector/src/test/java/UnboundedMonitoringAttributeTests.java
Normal file
@ -0,0 +1,280 @@
/*
 * Copyright (c) 2023 Institute of Communication and Computer Systems
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

import eu.melodic.event.brokerclient.BrokerPublisher;
import eu.melodic.event.brokerclient.BrokerSubscriber;
import eu.melodic.event.brokerclient.templates.EventFields;
import eu.melodic.event.brokerclient.templates.TopicNames;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
import org.junit.Test;

import runtime.Main;
import slo_processing.SLORule;
import slo_processing.SLOSubRule;
import utility_beans.MonitoringAttributeUtilities;
import utility_beans.PredictedMonitoringAttribute;
import utility_beans.RealtimeMonitoringAttribute;

import java.io.*;

import java.net.URI;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Properties;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiFunction;
import java.util.logging.Level;
import java.util.logging.Logger;

import static configuration.Constants.*;
import static runtime.Main.PREDICTION_EXISTS;
import static runtime.Main.initialize_subrule_and_attribute_associations;
import static slo_processing.SLORule.process_rule_value;
import static utility_beans.PredictedMonitoringAttribute.getPredicted_monitoring_attributes;
import static utility_beans.RealtimeMonitoringAttribute.*;

class MetricConfiguration{
    public String name;
    public double base_metric_value;
    public double forecasted_metric_value;
    public double confidence_interval;
    public double probability;
    public MetricConfiguration(String name, double base_metric_value, double forecasted_metric_value, double confidence_interval, double probability){
        this.name = name;
        this.base_metric_value = base_metric_value;
        this.forecasted_metric_value = forecasted_metric_value;
        this.confidence_interval = confidence_interval;
        this.probability = probability;
    }
}

public class UnboundedMonitoringAttributeTests {

    /**
     * This 30-second test assumes the availability of a broker, which is configured in the standard configuration file location employed by the Main method. It also assumes that random input of a custom metric centered around a value (75 for the first test) is provided (using an independent data provider). Based on this constant input, the standard deviation and mean is calculated, and the lower/upper bounds are estimated - in the case of the first test it is assumed that the metric is upwards and downwards unbounded. The assertions of the test are estimations based on repeated iterations with 100-sample data.
     */
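    // Illustrative sketch (an assumption, not the committed implementation): the bound
    // estimation described in the comment above could follow a mean +/- k*stddev scheme over
    // the received samples; 'samples' and the coefficient 'k' are hypothetical names here,
    // and the authoritative logic lives in utility_beans.MonitoringAttributeStatistics.
    //   double mean = samples.stream().mapToDouble(Double::doubleValue).average().orElse(0.0);
    //   double std = Math.sqrt(samples.stream().mapToDouble(v -> (v - mean) * (v - mean)).sum() / samples.size());
    //   double lower_bound = mean - k * std;
    //   double upper_bound = mean + k * std;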

    //private String metric_1_name = "custom_metric_1";
    private static Long targeted_prediction_time = 100000000000L;

    @Test
    public void unbounded_monitoring_attribute_test_1() throws IOException, ParseException {
        unbounded_monitoring_attribute_test_core("src/main/resources/test_v3_custom_metric_1_simple.json","custom_metric_1",new Double[]{20.0,35.0},new Double[]{110.0,130.0},0.0,50,100, 90,10,0.80);
    }
    @Test
    public void unbounded_monitoring_attribute_test_2() throws IOException, ParseException {
        unbounded_monitoring_attribute_test_core("src/main/resources/test_v3_custom_metric_unequal.json","number_of_users",new Double[]{-50.0,0.0},new Double[]{0.0,50.0},0.0,-25,20,5,0.3,0.90);
    }


    public void unbounded_monitoring_attribute_test_core(String json_file_name, String metric_1_name, Double[] metric_lower_bound_range, Double[] metric_upper_bound_range, double severity_lower_bound, double base_metric_value, double metric_max_value, double forecasted_metric_value,double generated_data_confidence_interval, double probability) throws IOException, ParseException {

        Main.can_modify_slo_rules.setValue(true);
        Properties prop = new Properties();

        URI absolute_configuration_file_path = new File(configuration_file_location).toURI();
        base_project_path = new File("").toURI();
        URI relative_path = base_project_path.relativize(absolute_configuration_file_path);

        InputStream inputStream = new FileInputStream(base_project_path.getPath()+relative_path.getPath());

        prop.load(inputStream);

        String broker_ip_address = prop.getProperty("broker_ip_address");
        String broker_username = prop.getProperty("broker_username");
        String broker_password = prop.getProperty("broker_password");

        String metric_string = metric_1_name+";unbounded;unbounded";
        String [] metric_names = {metric_string.split(";")[0]};
        getMonitoring_attributes_bounds_representation().put(metric_string.split(";")[0], metric_string.split(";",2)[1]);


        slo_violation_determination_method = "all-metrics";
        JSONObject rule_json = (JSONObject) new JSONParser().parse(String.join(EMPTY, Files.readAllLines(Paths.get(new File(json_file_name).getAbsolutePath()))));

        ArrayList<SLORule> slo_rules = new ArrayList<>();
        SLORule slo_rule = new SLORule(rule_json.toJSONString(), new ArrayList<>(Arrays.asList(new String[]{metric_1_name})));
        slo_rules.add(slo_rule);
        initialize_subrule_and_attribute_associations(slo_rules);

        data_publisher_for_unbounded_test(metric_1_name, metric_max_value, base_metric_value,forecasted_metric_value,generated_data_confidence_interval,probability);

        ArrayList<Thread> running = new ArrayList<>();
        for (String metric_name : metric_names) {
            MonitoringAttributeUtilities.initialize_values(metric_name);

            String realtime_metric_topic_name = TopicNames.realtime_metric_values_topic(metric_name);
            Logger.getAnonymousLogger().log(Level.INFO, "Starting realtime subscription at " + realtime_metric_topic_name);
            BrokerSubscriber subscriber = new BrokerSubscriber(realtime_metric_topic_name, broker_ip_address, broker_username, broker_password, amq_library_configuration_location);
            BiFunction<String, String, String> function = (topic, message) -> {
                synchronized (RealtimeMonitoringAttribute.getMonitoring_attributes().get(topic)) {
                    try {
                        update_monitoring_attribute_value(topic, ((Number) ((JSONObject) new JSONParser().parse(message)).get("metricValue")).doubleValue());

                        Logger.getAnonymousLogger().log(info_logging_level, "RECEIVED message with value for " + topic + " equal to " + (((JSONObject) new JSONParser().parse(message)).get("metricValue")));
                    } catch (ParseException e) {
                        e.printStackTrace();
                    }
                }
                return message;
            };
            Thread realtime_subscription_thread = new Thread(() -> {
                subscriber.subscribe(function,new AtomicBoolean(false)); //will be a short-lived test, so setting stop signal to false
                // Insert some method call here.
            });
            realtime_subscription_thread.start();
            running.add(realtime_subscription_thread);


            String forecasted_metric_topic_name = TopicNames.final_metric_predictions_topic(metric_name);
            BrokerSubscriber forecasted_subscriber = new BrokerSubscriber(forecasted_metric_topic_name, broker_ip_address,broker_username,broker_password, amq_library_configuration_location);
            BiFunction<String,String,String> forecasted_function = (topic,message) ->{
                String predicted_attribute_name = topic.replaceFirst("prediction\\.",EMPTY);
                HashMap<Integer, HashMap<Long, PredictedMonitoringAttribute>> predicted_attributes = getPredicted_monitoring_attributes();
                try {
                    double forecasted_value = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.metric_value)).doubleValue();
                    double probability_confidence = 100*((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.probability)).doubleValue();
                    //double confidence_interval = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.confidence_interval)).doubleValue();
                    JSONArray json_array_confidence_interval = ((JSONArray)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.confidence_interval));
                    double confidence_interval = ((Number)json_array_confidence_interval.get(1)).doubleValue() - ((Number)json_array_confidence_interval.get(0)).doubleValue();
                    long timestamp = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.timestamp)).longValue();
                    long targeted_prediction_time = ((Number)((JSONObject)new JSONParser().parse(message)).get(EventFields.PredictionMetricEventFields.prediction_time)).longValue();
                    Logger.getAnonymousLogger().log(info_logging_level,"RECEIVED message with predicted value for "+predicted_attribute_name+" equal to "+ forecasted_value);

                    synchronized (Main.can_modify_slo_rules) {
                        if(!Main.can_modify_slo_rules.getValue()) {
                            Main.can_modify_slo_rules.wait();
                        }
                        Main.can_modify_slo_rules.setValue(false);

                        if( Main.adaptation_times.size()==0 || (!Main.adaptation_times.contains(targeted_prediction_time)) && targeted_prediction_time>Main.adaptation_times.stream().min(Long::compare).get()){
                            Logger.getAnonymousLogger().log(info_logging_level,"Adding a new targeted prediction time "+targeted_prediction_time);
                            Main.adaptation_times.add(targeted_prediction_time);
                            synchronized (PREDICTION_EXISTS) {
                                PREDICTION_EXISTS.setValue(true);
                                PREDICTION_EXISTS.notifyAll();
                            }
                        }
                        //predicted_attributes.get(predicted_attribute_name).clear();
                        for (SLOSubRule subrule : SLOSubRule.getSlo_subrules_per_monitoring_attribute().get(predicted_attribute_name)) {
                            getPredicted_monitoring_attributes().computeIfAbsent(subrule.getId(), k -> new HashMap<>());
                            if ( (getPredicted_monitoring_attributes().get(subrule.getId()).get(targeted_prediction_time)!=null) &&(getPredicted_monitoring_attributes().get(subrule.getId()).get(targeted_prediction_time).getTimestamp()>timestamp)){
                                //do nothing, as in this case an older prediction has arrived for a metric delayed, and so it should be disregarded
                            }else {
                                PredictedMonitoringAttribute prediction_attribute = new PredictedMonitoringAttribute(predicted_attribute_name, subrule.getThreshold(), subrule.getId(), forecasted_value, probability_confidence, confidence_interval, timestamp,targeted_prediction_time);

                                //predicted_attributes.get(predicted_attribute_name).add(prediction_attribute);
                                subrule.setAssociated_predicted_monitoring_attribute(prediction_attribute);

                                getPredicted_monitoring_attributes().get(subrule.getId()).put(targeted_prediction_time, prediction_attribute);
                            }
                        }
                        Main.can_modify_slo_rules.setValue(true);
                    }
                    //SLOViolationCalculator.get_Severity_all_metrics_method(prediction_attribute)

                } catch (ParseException | InterruptedException e) {
                    e.printStackTrace();
                }
                return message;
            };
            Thread forecasted_subscription_thread = new Thread(() -> {
                synchronized (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(forecasted_metric_topic_name)) {
                    //if (Main.HAS_MESSAGE_ARRIVED.get_synchronized_boolean(forecasted_metric_topic_name).getValue())
                    forecasted_subscriber.subscribe(forecasted_function,new AtomicBoolean(false)); //will be a short-lived test, so setting stop signal to false
                }
            });
            running.add(forecasted_subscription_thread);
            forecasted_subscription_thread.start();

        }
        try{
            Thread.sleep(30000);
        }catch (Exception e){
            e.printStackTrace();
        }

        double upper_bound = getMonitoring_attributes_statistics().get(metric_1_name).getUpper_bound();
        double lower_bound = getMonitoring_attributes_statistics().get(metric_1_name).getLower_bound();

        Logger.getAnonymousLogger().log(Level.INFO,"The bounds calculated are\nLower bound: "+lower_bound+"\nUpper bound: "+upper_bound);
        //assert (upper_bound<130 && upper_bound>110 && lower_bound>20 && lower_bound <35);
        assert (upper_bound<metric_upper_bound_range[1] && upper_bound>metric_upper_bound_range[0] && lower_bound>metric_lower_bound_range[0] && lower_bound <metric_lower_bound_range[1]);

        double rule_severity = process_rule_value(rule_json,targeted_prediction_time, slo_rule.getRule_format());
        Logger.getAnonymousLogger().log(Level.INFO,"The severity calculated is\nSeverity: "+rule_severity);
        assert (rule_severity>severity_lower_bound);
    }


    @Test
    public void all_metrics_unbounded_monitoring_attribute_Severity_test(){

    }

    public void data_publisher_for_unbounded_test(String metric_name, double metric_max_value, double base_metric_value, double forecasted_metric_value, double confidence_interval, double probability ){

        int publish_interval_in_milliseconds = 100;
        MetricConfiguration custom_metric_1 = new MetricConfiguration(metric_name,base_metric_value,forecasted_metric_value,confidence_interval,probability);
        //MetricConfiguration ram_metric = new MetricConfiguration("ram",90,100,5,100);


        ArrayList<MetricConfiguration> metrics = new ArrayList<>();
        metrics.add(custom_metric_1);

        for (MetricConfiguration metric: metrics) {
            Thread publishing_thread = new Thread(() -> {
                perpetual_metric_publisher(metric.name,metric.base_metric_value,metric.forecasted_metric_value,metric.confidence_interval,metric.probability, metric_max_value, publish_interval_in_milliseconds);
            });
            publishing_thread.start();
        }
    }

    private static void perpetual_metric_publisher(String metric_name, double base_metric_value, double forecasted_metric_value, double confidence_interval, double probability, double metric_max_value, int publish_interval_in_milliseconds) {
        BrokerPublisher realtime_data_publisher = new BrokerPublisher(metric_name, "tcp://localhost:61616", "admin", "admin","src/main/resources/config/eu.melodic.event.brokerclient.properties");
        BrokerPublisher forecasted_data_publisher = new BrokerPublisher("prediction."+metric_name, "tcp://localhost:61616", "admin", "admin","src/main/resources/config/eu.melodic.event.brokerclient.properties");

        while (true) {
            try {
                JSONObject realtime_metric_json_object = new JSONObject();
                //Create values centered around 82.5
                double random_value = ThreadLocalRandom.current().nextDouble();
                realtime_metric_json_object.put("metricValue", base_metric_value+random_value*(metric_max_value-base_metric_value));
                realtime_metric_json_object.put("timestamp",System.currentTimeMillis());
                realtime_data_publisher.publish(realtime_metric_json_object.toJSONString());

                JSONObject forecasted_metric_json_object = new JSONObject();
                forecasted_metric_json_object.put("metricValue", forecasted_metric_value);
                forecasted_metric_json_object.put("timestamp",System.currentTimeMillis());
                forecasted_metric_json_object.put("probability",probability);
                forecasted_metric_json_object.put(EventFields.PredictionMetricEventFields.prediction_time,targeted_prediction_time);
                //((System.currentTimeMillis()/1000)%60)*60000+1); //The prediction supposedly reflects the metric values at the next minute
                JSONArray confidence_interval_list = new JSONArray();
                confidence_interval_list.add((forecasted_metric_value-confidence_interval/2));
                confidence_interval_list.add((forecasted_metric_value+confidence_interval/2));
                forecasted_metric_json_object.put("confidence_interval",confidence_interval_list);
                forecasted_data_publisher.publish(forecasted_metric_json_object.toJSONString());
                Thread.sleep(publish_interval_in_milliseconds);

            }catch (InterruptedException i){
                i.printStackTrace();
            }


        }
    }


}
@ -8,15 +8,15 @@
- nebulous-slo-violation-detector-container-images
- nebulous-slo-violation-detector-container-images
description: Build the container images.
description: Build the container images.
files: &image_files
files: &image_files
- ^java-spring-boot-demo/
- ^slo-violation-detector/
vars: &image_vars
vars: &image_vars
promote_container_image_job: nebulous-slo-violation-detector-upload-container-images
promote_container_image_job: nebulous-slo-violation-detector-upload-container-images
container_images:
container_images:
- context: java-spring-boot-demo
- context: slo-violation-detector
registry: quay.io
registry: quay.io
repository: quay.io/nebulous/slo-violation-detector-java-spring-boot-demo
repository: quay.io/nebulous/slo-violation-detector
namespace: nebulous
namespace: nebulous
repo_shortname: slo-violation-detector-java-spring-boot-demo
repo_shortname: slo-violation-detector
repo_description: ""
repo_description: ""

- job:
- job:
@ -44,7 +44,7 @@
description: Run Hadolint on Dockerfile(s).
description: Run Hadolint on Dockerfile(s).
vars:
vars:
dockerfiles:
dockerfiles:
- java-spring-boot-demo/Dockerfile
- slo-violation-detector/Dockerfile

- job:
- job:
name: nebulous-slo-violation-detector-helm-lint
name: nebulous-slo-violation-detector-helm-lint