Skip to content
This repository was archived by the owner on Oct 21, 2025. It is now read-only.

Commit e008b6e

Browse files
authored
Merge pull request #6 from DataFlowAnalysis/praktikum
Constraint Select and TFG highlighting
2 parents 9767d94 + b15d85d commit e008b6e

5 files changed

Lines changed: 152 additions & 227 deletions

File tree

bundles/org.dataflowanalysis.standalone/META-INF/MANIFEST.MF

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,11 @@ Require-Bundle: org.dataflowanalysis.dfd.datadictionary;bundle-version="2.0.0",
2525
org.dataflowanalysis.pcm.extension.nodecharacteristics;bundle-version="0.1.0",
2626
org.dataflowanalysis.analysis.pcm,
2727
org.palladiosimulator.commons.stoex;bundle-version="5.2.1",
28-
de.uka.ipd.sdq.stoex.analyser;bundle-version="5.2.1"
28+
de.uka.ipd.sdq.stoex.analyser;bundle-version="5.2.1",
29+
com.fasterxml.jackson.core.jackson-core;bundle-version="2.13.2",
30+
com.fasterxml.jackson.core.jackson-annotations;bundle-version="2.13.2",
31+
com.fasterxml.jackson.core.jackson-databind;bundle-version="2.13.2",
32+
org.apache.log4j;bundle-version="1.2.24"
2933
Automatic-Module-Name: DataFlowAnalalysisStandalone
3034
Bundle-RequiredExecutionEnvironment: JavaSE-17
3135
Export-Package:
Lines changed: 114 additions & 186 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,13 @@
11
package org.dataflowanalysis.standalone.analysis;
22

33
import java.io.File;
4+
import java.io.IOException;
45
import java.nio.file.Files;
5-
import java.util.ArrayList;
6-
import java.util.HashMap;
76
import java.util.List;
8-
import java.util.Map;
97

10-
import org.apache.log4j.Level;
118
import org.apache.log4j.Logger;
12-
import org.dataflowanalysis.analysis.dfd.DFDDataFlowAnalysisBuilder;
13-
import org.dataflowanalysis.analysis.dfd.resource.DFDModelResourceProvider;
149
import org.dataflowanalysis.analysis.dfd.simple.DFDSimpleTransposeFlowGraphFinder;
1510
import org.dataflowanalysis.analysis.dsl.AnalysisConstraint;
16-
import org.dataflowanalysis.analysis.dsl.result.DSLResult;
1711
import org.dataflowanalysis.analysis.utils.StringView;
1812
import org.dataflowanalysis.converter.dfd2web.DataFlowDiagramAndDictionary;
1913
import org.dataflowanalysis.converter.dfd2web.DFD2WebConverter;
@@ -22,12 +16,9 @@
2216
import org.dataflowanalysis.converter.web2dfd.Web2DFDConverter;
2317
import org.dataflowanalysis.converter.web2dfd.WebEditorConverterModel;
2418
import org.dataflowanalysis.converter.web2dfd.model.WebEditorDfd;
25-
import org.dataflowanalysis.converter.web2dfd.model.Child;
26-
import org.dataflowanalysis.converter.web2dfd.model.Annotation;
2719
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
2820
import org.dataflowanalysis.dfd.datadictionary.datadictionaryPackage;
2921
import org.dataflowanalysis.dfd.dataflowdiagram.DataFlowDiagram;
30-
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
3122
import org.dataflowanalysis.dfd.dataflowdiagram.dataflowdiagramPackage;
3223
import org.eclipse.emf.common.util.URI;
3324
import org.eclipse.emf.ecore.resource.Resource;
@@ -37,183 +28,120 @@
3728
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
3829

3930
public class Converter {
40-
41-
private static final Logger logger = Logger.getLogger(AnalysisConstraint.class);
42-
43-
/**
44-
* Convertes a DFD from the Ecore to the WebEditor Json representation
45-
* @param dfd File where DFD is saved
46-
* @param dd File where DD is saved
47-
* @return Created WebEditor Json representation
48-
*/
49-
public static WebEditorDfd convertDFD(File dfd, File dd){
50-
try {
51-
var converter = new DFD2WebConverter();
52-
53-
ResourceSet rs = new ResourceSetImpl();
54-
rs.getResourceFactoryRegistry().getExtensionToFactoryMap().put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
55-
rs.getPackageRegistry().put(dataflowdiagramPackage.eNS_URI, dataflowdiagramPackage.eINSTANCE);
56-
rs.getPackageRegistry().put(datadictionaryPackage.eNS_URI, datadictionaryPackage.eINSTANCE);
31+
32+
private static final Logger logger = Logger.getLogger(Converter.class);
5733

58-
Resource ddResource = rs.getResource(URI.createFileURI(dd.toString()), true);
59-
Resource dfdResource = rs.getResource(URI.createFileURI(dfd.toString()), true);
60-
System.out.println(dd.toString());
61-
System.out.println(dfd.toString());
62-
EcoreUtil.resolveAll(rs);
63-
EcoreUtil.resolveAll(ddResource);
64-
EcoreUtil.resolveAll(dfdResource);
65-
DataFlowDiagramAndDictionary dfdAndDD = new DataFlowDiagramAndDictionary((DataFlowDiagram)dfdResource.getContents().get(0), (DataDictionary)ddResource.getContents().get(0));
66-
67-
var newJson = converter.convert(dfdAndDD);
68-
69-
return newJson.getModel();
70-
71-
} catch (Exception e) {
72-
e.printStackTrace();
73-
return null;
74-
}
75-
}
76-
77-
78-
/**
79-
* Convertes a Model in PCM representation into a WebEditor Json represenation
80-
* @param usageModelFile File where Usage Model is saved
81-
* @param allocationModelFile File where Allocation Model is saved
82-
* @param nodeCharacteristicsFile File where Node Characteristics Model is saved
83-
* @return Created WebEditor Json representation
84-
*/
85-
public static WebEditorDfd convertPCM(File usageModelFile, File allocationModelFile, File nodeCharacteristicsFile){
86-
try {
87-
var converter = new PCM2DFDConverter();
88-
var dfd = converter.convert(new PCMConverterModel(usageModelFile.toString(), allocationModelFile.toString(), nodeCharacteristicsFile.toString()));
89-
90-
91-
var dfdConverter = new DFD2WebConverter();
92-
dfdConverter.setTransposeFlowGraphFinder(DFDSimpleTransposeFlowGraphFinder.class);
93-
return dfdConverter.convert(dfd).getModel();
94-
95-
} catch (Exception e) {
96-
e.printStackTrace();
97-
}
98-
return null;
99-
}
100-
101-
/**
102-
* Analyzes a Model in WebEditor Json Representation and returns the analyzed Model
103-
* @param webEditorDfd Model to be analyzed
104-
* @return Analyzed Model
105-
*/
106-
public static WebEditorDfd analyzeAnnotate(WebEditorDfd webEditorDfd) {
107-
try {
108-
var webEditorconverter = new Web2DFDConverter();
109-
var dd = webEditorconverter.convert(new WebEditorConverterModel(webEditorDfd));
110-
var dfdConverter = new DFD2WebConverter();
111-
var newJson = dfdConverter.convert(dd).getModel();
112-
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty()) {
113-
var constraints = parseConstraints(webEditorDfd);
114-
var violations = runAnalysis(dd, constraints);
115-
newJson.constraints().addAll(webEditorDfd.constraints()); //Reapply constraints
116-
return annotateViolations(newJson, violations);
117-
}
118-
return newJson;
119-
} catch (Exception e) {
120-
e.printStackTrace();
121-
}
122-
return null;
123-
}
124-
125-
/**
126-
* Converts a model in WebEditor Json representation into the DFD metamodel representation and return the DFD files as a concatenated string
127-
* @param webEditorDfd model in WebEditor Json representation to be converted
128-
* @param name Name of the files to be created
129-
* @return Concatenation of DFD and DD files as string
130-
*/
131-
public static String convertToDFDandStringify(WebEditorDfd webEditorDfd, String name) {
132-
try {
133-
var converter = new Web2DFDConverter();
134-
var dfd = converter.convert(new WebEditorConverterModel(webEditorDfd));
135-
String tempDir = System.getProperty("java.io.tmpdir");
136-
var dfdFile = new File(tempDir, name + ".dataflowdiagram");
137-
var ddFile = new File(tempDir, name + ".datadictionary");
138-
dfd.save(dfdFile.getParent(), name);
139-
140-
String dfdContent = Files.readString(dfdFile.toPath());
141-
String ddContent = Files.readString(ddFile.toPath());
34+
/**
35+
 * Converts a DFD from the Ecore to the WebEditor Json representation
36+
* @param dfd File where DFD is saved
37+
* @param dd File where DD is saved
38+
* @return Created WebEditor Json representation
39+
*/
40+
public static WebEditorDfd convertDFD(File dfd, File dd){
41+
var converter = new DFD2WebConverter();
42+
43+
ResourceSet rs = new ResourceSetImpl();
44+
rs.getResourceFactoryRegistry().getExtensionToFactoryMap().put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
45+
rs.getPackageRegistry().put(dataflowdiagramPackage.eNS_URI, dataflowdiagramPackage.eINSTANCE);
46+
rs.getPackageRegistry().put(datadictionaryPackage.eNS_URI, datadictionaryPackage.eINSTANCE);
14247

143-
dfdFile.delete();
144-
ddFile.delete();
145-
return dfdContent + "\n" + ddContent;
146-
147-
} catch (Exception e) {
148-
e.printStackTrace();
149-
return null;
150-
}
151-
}
152-
153-
private static List<AnalysisConstraint> parseConstraints(WebEditorDfd webEditorDfd) {
154-
return webEditorDfd.constraints().stream()
155-
.filter(it -> it.constraint() != null && !it.constraint().isEmpty())
156-
.map(it -> {
157-
return AnalysisConstraint.fromString(new StringView(it.constraint())).getResult();
158-
}).toList();
159-
}
160-
161-
private static List<DSLResult> runAnalysis(DataFlowDiagramAndDictionary dfd, List<AnalysisConstraint> constraints) {
162-
var analysis = new DFDDataFlowAnalysisBuilder()
163-
.standalone()
164-
.modelProjectName(" ")
165-
.useCustomResourceProvider(new DFDModelResourceProvider(dfd.dataDictionary(), dfd.dataFlowDiagram()))
166-
.build();
167-
48+
Resource ddResource = rs.getResource(URI.createFileURI(dd.toString()), true);
49+
Resource dfdResource = rs.getResource(URI.createFileURI(dfd.toString()), true);
50+
EcoreUtil.resolveAll(rs);
51+
EcoreUtil.resolveAll(ddResource);
52+
EcoreUtil.resolveAll(dfdResource);
53+
DataFlowDiagramAndDictionary dfdAndDD = new DataFlowDiagramAndDictionary((DataFlowDiagram)dfdResource.getContents().get(0), (DataDictionary)ddResource.getContents().get(0));
54+
55+
var newJson = converter.convert(dfdAndDD);
56+
57+
return newJson.getModel();
58+
}
59+
60+
61+
/**
62+
 * Converts a Model in PCM representation into a WebEditor Json representation
63+
* @param usageModelFile File where Usage Model is saved
64+
* @param allocationModelFile File where Allocation Model is saved
65+
* @param nodeCharacteristicsFile File where Node Characteristics Model is saved
66+
* @return Created WebEditor Json representation
67+
*/
68+
public static WebEditorDfd convertPCM(File usageModelFile, File allocationModelFile, File nodeCharacteristicsFile){
69+
var converter = new PCM2DFDConverter();
70+
var dfd = converter.convert(new PCMConverterModel(usageModelFile.toString(), allocationModelFile.toString(), nodeCharacteristicsFile.toString()));
71+
72+
73+
var dfdConverter = new DFD2WebConverter();
74+
dfdConverter.setTransposeFlowGraphFinder(DFDSimpleTransposeFlowGraphFinder.class);
75+
return dfdConverter.convert(dfd).getModel();
76+
}
77+
78+
/**
79+
* Analyzes a Model in WebEditor Json Representation and returns the analyzed Model
80+
* @param webEditorDfd Model to be analyzed
81+
* @return Analyzed Model
82+
*/
83+
public static WebEditorDfd analyzeAnnotate(WebEditorDfd webEditorDfd) {
84+
var webEditorconverter = new Web2DFDConverter();
85+
var dd = webEditorconverter.convert(new WebEditorConverterModel(webEditorDfd));
86+
var dfdConverter = new DFD2WebConverter();
87+
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty()) {
88+
var constraints = parseConstraints(webEditorDfd);
89+
dfdConverter.setConstraints(constraints);
90+
}
91+
var newJson = dfdConverter.convert(dd).getModel();
92+
93+
for (var child : newJson.model().children()) {
94+
if (child.type().startsWith("node") && child.annotations() != null) {
95+
var oldNode = webEditorDfd.model().children().stream().filter(node -> node.id().equals(child.id())).findAny().orElseThrow();
96+
//Necessary, if ugly, if we want to preserve custom annotations
97+
var annotationsToRemove = oldNode.annotations().stream().filter(a -> a.message().startsWith("Propagated") || a.message().startsWith("Incoming") || a.message().startsWith("Constraint")).toList();
98+
oldNode.annotations().removeAll(annotationsToRemove);
99+
oldNode.annotations().addAll(child.annotations());
100+
}
101+
}
102+
return webEditorDfd;
103+
}
104+
105+
/**
106+
* Converts a model in WebEditor Json representation into the DFD metamodel representation and return the DFD files as a concatenated string
107+
* @param webEditorDfd model in WebEditor Json representation to be converted
108+
* @param name Name of the files to be created
109+
* @return Concatenation of DFD and DD files as string
110+
*/
111+
public static String convertToDFDandStringify(WebEditorDfd webEditorDfd, String name) {
112+
try {
113+
var converter = new Web2DFDConverter();
114+
var dfd = converter.convert(new WebEditorConverterModel(webEditorDfd));
115+
String tempDir = System.getProperty("java.io.tmpdir");
116+
var dfdFile = new File(tempDir, name + ".dataflowdiagram");
117+
var ddFile = new File(tempDir, name + ".datadictionary");
118+
dfd.save(dfdFile.getParent(), name);
119+
120+
String dfdContent = Files.readString(dfdFile.toPath());
121+
String ddContent = Files.readString(ddFile.toPath());
168122

169-
logger.setLevel(Level.DEBUG);
170-
171-
var tfg = analysis.findFlowGraphs();
172-
tfg.evaluate();
173-
174-
175-
return constraints.stream().flatMap(it -> it.findViolations(tfg).stream()).toList();
176-
}
177-
178-
private static WebEditorDfd annotateViolations(WebEditorDfd webEditorDfd, List<DSLResult> violations) {
179-
Map<Child, String> nodeToAnnotationMap = new HashMap<>();
180-
181-
for (int i = 0; i < violations.size(); i++) {
182-
final int index = i;
183-
violations.get(i).getMatchedVertices().stream().forEach(it -> {
184-
var node = webEditorDfd.model().children().stream()
185-
.filter(child -> child.id().equals(((Node)it.getReferencedElement()).getId())).findFirst().orElseThrow();
186-
var annotation = "";
187-
if (nodeToAnnotationMap.containsKey(node)) {
188-
annotation = nodeToAnnotationMap.get(node);
189-
annotation += "\n";
190-
}
191-
annotation += "Constraint " + index + " violated";
192-
nodeToAnnotationMap.put(node, annotation);
193-
});
194-
}
195-
196-
List<Child> newChildren = new ArrayList<>();
197-
198-
for (Child child : webEditorDfd.model().children()) {
199-
if (nodeToAnnotationMap.containsKey(child)) {
200-
StringBuilder builder = new StringBuilder();
201-
if(child.annotation() != null) builder.append(child.annotation().message().toString());
202-
if (builder.toString() != "") builder.append("\n");
203-
builder.append(nodeToAnnotationMap.get(child));
204-
205-
var annotation = new Annotation(builder.toString(), "bolt", "#ff0000");
206-
207-
var newChild = new Child(child.text(), child.labels(), child.ports(), child.id(), child.type(), null, null,annotation, child.children());
208-
newChildren.add(newChild);
209-
}
210-
}
211-
212-
webEditorDfd.model().children().removeAll(nodeToAnnotationMap.keySet());
213-
webEditorDfd.model().children().addAll(newChildren);
214-
215-
return webEditorDfd;
216-
}
217-
218-
123+
dfdFile.delete();
124+
ddFile.delete();
125+
return dfdContent + "\n" + ddContent;
126+
127+
} catch (IOException e) {
128+
e.printStackTrace();
129+
return "Error";
130+
}
131+
}
132+
133+
private static List<AnalysisConstraint> parseConstraints(WebEditorDfd webEditorDfd) {
134+
return webEditorDfd.constraints().stream()
135+
.filter(it -> it.constraint() != null && !it.constraint().isEmpty())
136+
.map(it -> {
137+
StringView string = new StringView("- " + it.name() + ": " + it.constraint().replace("\n", ""));
138+
var constraint = AnalysisConstraint.fromString(string);
139+
if (constraint.failed()) {
140+
logger.error(constraint.getError());
141+
throw new IllegalArgumentException("Unable to parse constraint: " + it.name());
142+
}
143+
var constraint2 = constraint.getResult();
144+
return constraint2;
145+
}).toList();
146+
}
219147
}

0 commit comments

Comments
 (0)