Skip to content
This repository was archived by the owner on Oct 21, 2025. It is now read-only.

Commit b5919fd

Browse files
committed
Feat: Replace/Remove sysout with logger, Add more informative error handling
1 parent 327135d commit b5919fd

5 files changed

Lines changed: 134 additions & 250 deletions

File tree

bundles/org.dataflowanalysis.standalone/META-INF/MANIFEST.MF

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,11 @@ Require-Bundle: org.dataflowanalysis.dfd.datadictionary;bundle-version="2.0.0",
2525
org.dataflowanalysis.pcm.extension.nodecharacteristics;bundle-version="0.1.0",
2626
org.dataflowanalysis.analysis.pcm,
2727
org.palladiosimulator.commons.stoex;bundle-version="5.2.1",
28-
de.uka.ipd.sdq.stoex.analyser;bundle-version="5.2.1"
28+
de.uka.ipd.sdq.stoex.analyser;bundle-version="5.2.1",
29+
com.fasterxml.jackson.core.jackson-core;bundle-version="2.13.2",
30+
com.fasterxml.jackson.core.jackson-annotations;bundle-version="2.13.2",
31+
com.fasterxml.jackson.core.jackson-databind;bundle-version="2.13.2",
32+
org.apache.log4j;bundle-version="1.2.24"
2933
Automatic-Module-Name: DataFlowAnalalysisStandalone
3034
Bundle-RequiredExecutionEnvironment: JavaSE-17
3135
Export-Package:
Lines changed: 106 additions & 212 deletions
Original file line numberDiff line numberDiff line change
@@ -1,24 +1,13 @@
11
package org.dataflowanalysis.standalone.analysis;
22

33
import java.io.File;
4+
import java.io.IOException;
45
import java.nio.file.Files;
5-
import java.util.ArrayList;
6-
import java.util.HashMap;
76
import java.util.List;
8-
import java.util.Map;
9-
import java.awt.Color;
10-
import java.nio.charset.StandardCharsets;
117

12-
import java.security.MessageDigest;
13-
import java.security.NoSuchAlgorithmException;
14-
15-
import org.apache.log4j.Level;
168
import org.apache.log4j.Logger;
17-
import org.dataflowanalysis.analysis.dfd.DFDDataFlowAnalysisBuilder;
18-
import org.dataflowanalysis.analysis.dfd.resource.DFDModelResourceProvider;
199
import org.dataflowanalysis.analysis.dfd.simple.DFDSimpleTransposeFlowGraphFinder;
2010
import org.dataflowanalysis.analysis.dsl.AnalysisConstraint;
21-
import org.dataflowanalysis.analysis.dsl.result.DSLResult;
2211
import org.dataflowanalysis.analysis.utils.StringView;
2312
import org.dataflowanalysis.converter.dfd2web.DataFlowDiagramAndDictionary;
2413
import org.dataflowanalysis.converter.dfd2web.DFD2WebConverter;
@@ -27,12 +16,9 @@
2716
import org.dataflowanalysis.converter.web2dfd.Web2DFDConverter;
2817
import org.dataflowanalysis.converter.web2dfd.WebEditorConverterModel;
2918
import org.dataflowanalysis.converter.web2dfd.model.WebEditorDfd;
30-
import org.dataflowanalysis.converter.web2dfd.model.Child;
31-
import org.dataflowanalysis.converter.web2dfd.model.Annotation;
3219
import org.dataflowanalysis.dfd.datadictionary.DataDictionary;
3320
import org.dataflowanalysis.dfd.datadictionary.datadictionaryPackage;
3421
import org.dataflowanalysis.dfd.dataflowdiagram.DataFlowDiagram;
35-
import org.dataflowanalysis.dfd.dataflowdiagram.Node;
3622
import org.dataflowanalysis.dfd.dataflowdiagram.dataflowdiagramPackage;
3723
import org.eclipse.emf.common.util.URI;
3824
import org.eclipse.emf.ecore.resource.Resource;
@@ -42,204 +28,112 @@
4228
import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
4329

4430
public class Converter {
45-
46-
private static final Logger logger = Logger.getLogger(AnalysisConstraint.class);
47-
48-
/**
49-
* Converts a DFD from the Ecore to the WebEditor Json representation
50-
* @param dfd File where DFD is saved
51-
* @param dd File where DD is saved
52-
* @return Created WebEditor Json representation
53-
*/
54-
public static WebEditorDfd convertDFD(File dfd, File dd){
55-
try {
56-
var converter = new DFD2WebConverter();
57-
58-
ResourceSet rs = new ResourceSetImpl();
59-
rs.getResourceFactoryRegistry().getExtensionToFactoryMap().put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
60-
rs.getPackageRegistry().put(dataflowdiagramPackage.eNS_URI, dataflowdiagramPackage.eINSTANCE);
61-
rs.getPackageRegistry().put(datadictionaryPackage.eNS_URI, datadictionaryPackage.eINSTANCE);
31+
32+
private static final Logger logger = Logger.getLogger(Converter.class);
6233

63-
Resource ddResource = rs.getResource(URI.createFileURI(dd.toString()), true);
64-
Resource dfdResource = rs.getResource(URI.createFileURI(dfd.toString()), true);
65-
System.out.println(dd.toString());
66-
System.out.println(dfd.toString());
67-
EcoreUtil.resolveAll(rs);
68-
EcoreUtil.resolveAll(ddResource);
69-
EcoreUtil.resolveAll(dfdResource);
70-
DataFlowDiagramAndDictionary dfdAndDD = new DataFlowDiagramAndDictionary((DataFlowDiagram)dfdResource.getContents().get(0), (DataDictionary)ddResource.getContents().get(0));
71-
72-
var newJson = converter.convert(dfdAndDD);
73-
74-
return newJson.getModel();
75-
76-
} catch (Exception e) {
77-
e.printStackTrace();
78-
return null;
79-
}
80-
}
81-
82-
83-
/**
84-
* Converts a Model in PCM representation into a WebEditor Json representation
85-
* @param usageModelFile File where Usage Model is saved
86-
* @param allocationModelFile File where Allocation Model is saved
87-
* @param nodeCharacteristicsFile File where Node Characteristics Model is saved
88-
* @return Created WebEditor Json representation
89-
*/
90-
public static WebEditorDfd convertPCM(File usageModelFile, File allocationModelFile, File nodeCharacteristicsFile){
91-
try {
92-
var converter = new PCM2DFDConverter();
93-
var dfd = converter.convert(new PCMConverterModel(usageModelFile.toString(), allocationModelFile.toString(), nodeCharacteristicsFile.toString()));
94-
95-
96-
var dfdConverter = new DFD2WebConverter();
97-
dfdConverter.setTransposeFlowGraphFinder(DFDSimpleTransposeFlowGraphFinder.class);
98-
return dfdConverter.convert(dfd).getModel();
99-
100-
} catch (Exception e) {
101-
e.printStackTrace();
102-
}
103-
return null;
104-
}
105-
106-
/**
107-
* Analyzes a Model in WebEditor Json Representation and returns the analyzed Model
108-
* @param webEditorDfd Model to be analyzed
109-
* @return Analyzed Model
110-
*/
111-
public static WebEditorDfd analyzeAnnotate(WebEditorDfd webEditorDfd) {
112-
try {
113-
var webEditorconverter = new Web2DFDConverter();
114-
var dd = webEditorconverter.convert(new WebEditorConverterModel(webEditorDfd));
115-
var dfdConverter = new DFD2WebConverter();
116-
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty()) {
117-
var constraints = parseConstraints(webEditorDfd);
118-
dfdConverter.setConditions(constraints);
119-
}
120-
var newJson = dfdConverter.convert(dd).getModel();
121-
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty())
122-
newJson.constraints().addAll(webEditorDfd.constraints()); //Reapply constraints
123-
return newJson;
124-
} catch (Exception e) {
125-
e.printStackTrace();
126-
}
127-
return null;
128-
}
129-
130-
/**
131-
* Converts a model in WebEditor Json representation into the DFD metamodel representation and return the DFD files as a concatenated string
132-
* @param webEditorDfd model in WebEditor Json representation to be converted
133-
* @param name Name of the files to be created
134-
* @return Concatenation of DFD and DD files as string
135-
*/
136-
public static String convertToDFDandStringify(WebEditorDfd webEditorDfd, String name) {
137-
try {
138-
var converter = new Web2DFDConverter();
139-
var dfd = converter.convert(new WebEditorConverterModel(webEditorDfd));
140-
String tempDir = System.getProperty("java.io.tmpdir");
141-
var dfdFile = new File(tempDir, name + ".dataflowdiagram");
142-
var ddFile = new File(tempDir, name + ".datadictionary");
143-
dfd.save(dfdFile.getParent(), name);
144-
145-
String dfdContent = Files.readString(dfdFile.toPath());
146-
String ddContent = Files.readString(ddFile.toPath());
34+
/**
35+
* Converts a DFD from the Ecore to the WebEditor Json representation
36+
* @param dfd File where DFD is saved
37+
* @param dd File where DD is saved
38+
* @return Created WebEditor Json representation
39+
*/
40+
public static WebEditorDfd convertDFD(File dfd, File dd){
41+
var converter = new DFD2WebConverter();
42+
43+
ResourceSet rs = new ResourceSetImpl();
44+
rs.getResourceFactoryRegistry().getExtensionToFactoryMap().put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
45+
rs.getPackageRegistry().put(dataflowdiagramPackage.eNS_URI, dataflowdiagramPackage.eINSTANCE);
46+
rs.getPackageRegistry().put(datadictionaryPackage.eNS_URI, datadictionaryPackage.eINSTANCE);
14747

148-
dfdFile.delete();
149-
ddFile.delete();
150-
return dfdContent + "\n" + ddContent;
151-
152-
} catch (Exception e) {
153-
e.printStackTrace();
154-
return null;
155-
}
156-
}
157-
158-
private static List<AnalysisConstraint> parseConstraints(WebEditorDfd webEditorDfd) {
159-
return webEditorDfd.constraints().stream()
160-
.filter(it -> it.constraint() != null && !it.constraint().isEmpty())
161-
.map(it -> {
162-
var constraint = AnalysisConstraint.fromString(new StringView(it.constraint())).getResult();
163-
constraint.setName(it.name());
164-
return constraint;
165-
}).toList();
166-
}
167-
168-
private static List<DSLResult> runAnalysis(DataFlowDiagramAndDictionary dfd, List<AnalysisConstraint> constraints) {
169-
var analysis = new DFDDataFlowAnalysisBuilder()
170-
.standalone()
171-
.modelProjectName(" ")
172-
.useCustomResourceProvider(new DFDModelResourceProvider(dfd.dataDictionary(), dfd.dataFlowDiagram()))
173-
.build();
174-
48+
Resource ddResource = rs.getResource(URI.createFileURI(dd.toString()), true);
49+
Resource dfdResource = rs.getResource(URI.createFileURI(dfd.toString()), true);
50+
EcoreUtil.resolveAll(rs);
51+
EcoreUtil.resolveAll(ddResource);
52+
EcoreUtil.resolveAll(dfdResource);
53+
DataFlowDiagramAndDictionary dfdAndDD = new DataFlowDiagramAndDictionary((DataFlowDiagram)dfdResource.getContents().get(0), (DataDictionary)ddResource.getContents().get(0));
54+
55+
var newJson = converter.convert(dfdAndDD);
56+
57+
return newJson.getModel();
58+
}
59+
60+
61+
/**
62+
* Converts a Model in PCM representation into a WebEditor Json representation
63+
* @param usageModelFile File where Usage Model is saved
64+
* @param allocationModelFile File where Allocation Model is saved
65+
* @param nodeCharacteristicsFile File where Node Characteristics Model is saved
66+
* @return Created WebEditor Json representation
67+
*/
68+
public static WebEditorDfd convertPCM(File usageModelFile, File allocationModelFile, File nodeCharacteristicsFile){
69+
var converter = new PCM2DFDConverter();
70+
var dfd = converter.convert(new PCMConverterModel(usageModelFile.toString(), allocationModelFile.toString(), nodeCharacteristicsFile.toString()));
71+
72+
73+
var dfdConverter = new DFD2WebConverter();
74+
dfdConverter.setTransposeFlowGraphFinder(DFDSimpleTransposeFlowGraphFinder.class);
75+
return dfdConverter.convert(dfd).getModel();
76+
}
77+
78+
/**
79+
* Analyzes a Model in WebEditor Json Representation and returns the analyzed Model
80+
* @param webEditorDfd Model to be analyzed
81+
* @return Analyzed Model
82+
*/
83+
public static WebEditorDfd analyzeAnnotate(WebEditorDfd webEditorDfd) {
84+
var webEditorconverter = new Web2DFDConverter();
85+
var dd = webEditorconverter.convert(new WebEditorConverterModel(webEditorDfd));
86+
var dfdConverter = new DFD2WebConverter();
87+
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty()) {
88+
var constraints = parseConstraints(webEditorDfd);
89+
dfdConverter.setConditions(constraints);
90+
}
91+
var newJson = dfdConverter.convert(dd).getModel();
92+
if (webEditorDfd.constraints() != null && !webEditorDfd.constraints().isEmpty())
93+
newJson.constraints().addAll(webEditorDfd.constraints()); //Reapply constraints
94+
return newJson;
95+
}
96+
97+
/**
98+
* Converts a model in WebEditor Json representation into the DFD metamodel representation and return the DFD files as a concatenated string
99+
* @param webEditorDfd model in WebEditor Json representation to be converted
100+
* @param name Name of the files to be created
101+
* @return Concatenation of DFD and DD files as string
102+
*/
103+
public static String convertToDFDandStringify(WebEditorDfd webEditorDfd, String name) {
104+
try {
105+
var converter = new Web2DFDConverter();
106+
var dfd = converter.convert(new WebEditorConverterModel(webEditorDfd));
107+
String tempDir = System.getProperty("java.io.tmpdir");
108+
var dfdFile = new File(tempDir, name + ".dataflowdiagram");
109+
var ddFile = new File(tempDir, name + ".datadictionary");
110+
dfd.save(dfdFile.getParent(), name);
111+
112+
String dfdContent = Files.readString(dfdFile.toPath());
113+
String ddContent = Files.readString(ddFile.toPath());
175114

176-
logger.setLevel(Level.DEBUG);
177-
178-
var tfg = analysis.findFlowGraphs();
179-
tfg.evaluate();
180-
181-
182-
return constraints.stream().flatMap(it -> it.findViolations(tfg).stream()).toList();
183-
}
184-
185-
private static WebEditorDfd annotateViolations(WebEditorDfd webEditorDfd, List<DSLResult> violations) {
186-
Map<Child, List<Annotation>> nodeToAnnotationMap = new HashMap<>();
187-
188-
for (int i = 0; i < violations.size(); i++) {
189-
var violation = violations.get(i);
190-
violation.getMatchedVertices().stream().forEach(it -> {
191-
var node = webEditorDfd.model().children().stream()
192-
.filter(child -> child.id().equals(((Node)it.getReferencedElement()).getId())).findFirst().orElseThrow();
193-
nodeToAnnotationMap.putIfAbsent(node, new ArrayList<>());
194-
String message = "Constraint " + violation.getName() + " violated";
195-
nodeToAnnotationMap.get(node).add(new Annotation(message, "bolt", stringToColorHex(message), violation.getTransposeFlowGraph().hashCode()));
196-
});
197-
}
198-
199-
List<Child> newChildren = new ArrayList<>();
200-
201-
for (Child child : webEditorDfd.model().children()) {
202-
if (nodeToAnnotationMap.containsKey(child)) {
203-
var annotations = child.annotations();
204-
205-
annotations.addAll(nodeToAnnotationMap.get(child));
206-
207-
var newChild = new Child(child.text(), child.labels(), child.ports(), child.id(), child.type(), null, null, annotations, child.children());
208-
newChildren.add(newChild);
209-
}
210-
}
211-
212-
var nodesToRemove = new ArrayList<Child>();
213-
214-
nodeToAnnotationMap.keySet().forEach(node -> {
215-
for (Child child : webEditorDfd.model().children()) {
216-
if (child.id().equals(node.id())) {
217-
nodesToRemove.add(child);
218-
break;
219-
}
220-
}
221-
});
222-
223-
webEditorDfd.model().children().removeAll(nodesToRemove);
224-
webEditorDfd.model().children().addAll(newChildren);
225-
226-
return webEditorDfd;
227-
}
228-
229-
private static String stringToColorHex(String input) {
230-
byte[] hash;
231-
try {
232-
MessageDigest md = MessageDigest.getInstance("MD5");
233-
hash = md.digest(input.getBytes(StandardCharsets.UTF_8));
234-
} catch (NoSuchAlgorithmException e) {
235-
hash = new byte[] {(byte)0x80, (byte)0x80, (byte)0x80, 0};
236-
}
237-
float hue = (hash[0] & 0xFF) / 255f;
238-
float saturation = 0.5f + ((hash[1] & 0xFF) / 255f) * 0.5f;
239-
float brightness = 0.3f + ((hash[2] & 0xFF) / 255f) * 0.5f;
240-
saturation = Math.max(0.5f, Math.min(saturation, 1.0f));
241-
brightness = Math.max(0.3f, Math.min(brightness, 0.8f));
242-
Color color = Color.getHSBColor(hue, saturation, brightness);
243-
return String.format("#%02X%02X%02X", color.getRed(), color.getGreen(), color.getBlue());
244-
}
115+
dfdFile.delete();
116+
ddFile.delete();
117+
return dfdContent + "\n" + ddContent;
118+
119+
} catch (IOException e) {
120+
e.printStackTrace();
121+
return "Error";
122+
}
123+
}
124+
125+
private static List<AnalysisConstraint> parseConstraints(WebEditorDfd webEditorDfd) {
126+
return webEditorDfd.constraints().stream()
127+
.filter(it -> it.constraint() != null && !it.constraint().isEmpty())
128+
.map(it -> {
129+
StringView string = new StringView("- " + it.name() + ": " + it.constraint());
130+
var constraint = AnalysisConstraint.fromString(string);
131+
if (constraint.failed()) {
132+
logger.error(constraint.getError());
133+
throw new IllegalArgumentException("Unable to parse constraint: " + it.name());
134+
}
135+
var constraint2 = constraint.getResult();
136+
return constraint2;
137+
}).toList();
138+
}
245139
}

0 commit comments

Comments
 (0)