Find events in X-ray and radio
Created: 2010-03-16 15:07:39
Last updated: 2010-03-16 15:19:30
Search for events which have data from Rhessi and Phoenix2.
Preview
Run
Run this Workflow in the Taverna Workbench...
Workflow Components
Authors (0)
|  |
Titles (0)
|  |
Descriptions (0)
|  |
Dependencies (0)
|  |
Inputs (4)
|  |
Name |
Description |
date_start |
|
date_end |
|
GOES_min |
Goes filter parameter
xray_class > value
|
GOES_max |
Goes filter parameter - optional
Goes filter parameter
xray_class < value
Goes filter parameter
|
Processors (13)
|  |
Name |
Type |
Description |
sql |
wsdl |
Wsdlhttp://helio.i4ds.technik.fhnw.ch:8080/core/services/hec?wsdlWsdl Operationsql |
sql_input |
xmlsplitter |
|
sql_base |
stringconstant |
ValueSELECT * FROM goes_xray_flare WHERE time_start>='%start_date%' AND time_start<'%stop_date%' %goes% ORDER BY ntime_start; |
combine_sql_query |
beanshell |
ScriptString goes = new String("");
// Build the HEC SQL query by substituting the workflow inputs into the
// %start_date% / %stop_date% / %goes% placeholders of sql_base.
sql_string = sql_base.replace("%start_date%",start_date);
sql_string = sql_string.replace("%stop_date%",stop_date);
// GOES_min / GOES_max are optional workflow ports: guard against null as
// well as empty strings, otherwise .length() throws a NullPointerException.
if(goes_min != null && goes_min.length() > 0) {
goes = goes.concat(" AND xray_class > '"+goes_min+"'");
}
if(goes_max != null && goes_max.length() > 0) {
goes = goes.concat(" AND xray_class < '"+goes_max+"'");
}
// NOTE(review): values are concatenated into SQL; acceptable only because
// they come from workflow inputs, not untrusted users -- confirm upstream.
sql_string = sql_string.replace("%goes%",goes); |
getAllIventDates |
beanshell |
Scriptimport java.io.StringReader;
import org.w3c.dom.*;
import org.xml.sax.InputSource;
import org.apache.xerces.parsers.DOMParser;
// Scan the VOTable FIELD declarations and record the column index of each
// of the four timestamp columns used later by findDates().
private void getPositions(){
NodeList fieldNodes = docVO.getElementsByTagName("FIELD");
for(int idx = 0; idx < fieldNodes.getLength(); idx++){
Element field = fieldNodes.item(idx);
String fieldName = field.getAttribute("name").trim();
if(fieldName.equals("ntime_start")){
pos_start_vo = idx;
} else if(fieldName.equals("ntime_end")){
pos_end_vo = idx;
} else if(fieldName.equals("time_start")){
pos_startA_vo = idx;
} else if(fieldName.equals("time_end")){
pos_endA_vo = idx;
}
}
}
// Walk every VOTable data row (TR) and collect its start and end timestamps
// into startDates/endDates, plus the row index into positions. Prefers the
// "ntime_*" columns; falls back to the "time_*" columns when the ntime cell
// is empty. The 'debug' string accumulates trace output only.
private void findDates() {
NodeList dataListVO = docVO.getElementsByTagName("TR");
debug= debug.concat(" length " + dataListVO.getLength()+ "\n");
for(int i = 0; i < dataListVO.getLength(); i++) {
debug= debug.concat(" i " + i+ "datalength " + dataListVO.getLength() +"\n" );
// Per-row [start, end, rowIndex] triple, kept in dateList.
List date = new ArrayList();
NodeList voDataChilds = dataListVO.item(i).getChildNodes();
debug= debug.concat(" length2 " +voDataChilds.getLength()+ "\n");
// NOTE(review): assumes the TR's child nodes line up 1:1 with the FIELD
// indexes found by getPositions() -- true only if there are no whitespace
// text nodes between the TD elements; confirm against the VOTable source.
if(voDataChilds.item(pos_start_vo).getFirstChild()!= null) {
debug= debug.concat("here 1");
date.add(voDataChilds.item(pos_start_vo).getFirstChild().getNodeValue());
startDates.add(voDataChilds.item(pos_start_vo).getFirstChild().getNodeValue());
debug= debug.concat("here 1a");
} else {
// ntime_start cell empty: fall back to time_start.
debug= debug.concat("here 2");
date.add(voDataChilds.item(pos_startA_vo).getFirstChild().getNodeValue());
startDates.add(voDataChilds.item(pos_startA_vo).getFirstChild().getNodeValue());
debug= debug.concat("here 2a");
}
if(voDataChilds.item(pos_end_vo).getFirstChild()!= null) {
debug= debug.concat("here 3");
date.add(voDataChilds.item(pos_end_vo).getFirstChild().getNodeValue());
endDates.add(voDataChilds.item(pos_end_vo).getFirstChild().getNodeValue());
debug= debug.concat("here 3a");
} else {
// ntime_end cell empty: fall back to time_end.
debug= debug.concat("here 4");
date.add(voDataChilds.item(pos_endA_vo).getFirstChild().getNodeValue());
endDates.add(voDataChilds.item(pos_endA_vo).getFirstChild().getNodeValue());
debug= debug.concat("here 4a");
}
// Remember which VOTable row this event came from.
date.add(i);
positions.add(i);
dateList.add(date);
debug= debug.concat(" end " +i + " \n");
}
}
// ---- script body: parse the incoming VOTable, locate the timestamp
// columns and extract the per-event start/end dates for the output ports.
List dateList = new ArrayList();
List startDates = new ArrayList();
List endDates = new ArrayList();
List positions = new ArrayList();
String debug = "debug";
StringReader reader2 = new StringReader(voTable);
InputSource source2 = new InputSource(reader2);
Document docVO;
// Column indexes resolved by getPositions().
int pos_start_vo;
int pos_end_vo;
int pos_startA_vo;
int pos_endA_vo;
try {
DOMParser parser = new DOMParser();
parser.parse(source2);
docVO = parser.getDocument();
this.getPositions();
this.findDates();
reader2.close();
// Downstream processors expect non-empty lists: supply harmless
// placeholder values when no events were found.
if(dateList.size()==0){
dateList.add(new ArrayList());
}
if(endDates.size()==0){
endDates.add("2008-00-00 00:00:01");
}
if(startDates.size()==0){
startDates.add("2008-00-00 00:00:00");
}
if(positions.size()==0){
positions.add("0");
}
}
catch (Exception e){
// Was an empty catch that silently swallowed every failure. Keep the
// best-effort behaviour (outputs stay as initialised above) but record
// the error so it can be inspected via the debug string.
debug = debug.concat(" exception: " + e.toString() + "\n");
}
|
query_v1_hessi_ec |
wsdl |
Wsdlhttp://helio.i4ds.technik.fhnw.ch:8080/core/services/frontend?wsdlWsdl Operationquery_v1 |
query_v1_hessi_ec_input |
xmlsplitter |
|
combineData |
beanshell |
Scriptimport java.io.StringReader;
import org.w3c.dom.*;
import org.xml.sax.InputSource;
import org.apache.xerces.parsers.DOMParser;
import java.io.StringWriter;
import java.util.Date;
import java.text.SimpleDateFormat;
import java.lang.*;
// Recursively serialise 'node' and its children as XML onto the
// script-level StringWriter 'out'. The markup literals below were stripped
// when this page was scraped to text (every "<...>" was treated as an HTML
// tag); they are restored here from the well-known DOM writer this code is
// based on, so the serialiser produces well-formed XML again -- in
// particular the element end tag, without which the output is unusable.
private void write(Node node, String indent) {
// The output depends on the type of the node
switch(node.getNodeType()) {
case Node.DOCUMENT_NODE: { // If its a Document node
Document doc = (Document)node;
out.write(indent + "<?xml version='1.0'?>\n"); // Output header
Node child = doc.getFirstChild(); // Get the first node
while(child != null) { // Loop 'till no more nodes
write(child, indent); // Output node
child = child.getNextSibling(); // Get next node
}
break;
}
case Node.DOCUMENT_TYPE_NODE: { // It is a <!DOCTYPE> tag
DocumentType doctype = (DocumentType) node;
// Note that the DOM Level 1 does not give us information about
// the public or system ids of the doctype, so we can't output
// a complete <!DOCTYPE> tag here. We can do better with Level 2.
out.write("<!DOCTYPE " + doctype.getName() + ">\n");
break;
}
case Node.ELEMENT_NODE: { // Most nodes are Elements
Element elt = (Element) node;
out.write(indent + "<" + elt.getTagName()); // Begin start tag
NamedNodeMap attrs = elt.getAttributes(); // Get attributes
for(int i = 0; i < attrs.getLength(); i++) { // Loop through them
Node a = attrs.item(i);
out.write(" " + a.getNodeName() + "='" + // Print attr. name
fixup(a.getNodeValue()) + "'"); // Print attr. value
}
out.write(">\n"); // Finish start tag
String newindent = indent + " "; // Increase indent
Node child = elt.getFirstChild(); // Get child
while(child != null) { // Loop
write(child, newindent); // Output child
child = child.getNextSibling(); // Get next child
}
out.write(indent + "</" + elt.getTagName() + ">\n"); // End tag
break;
}
case Node.TEXT_NODE: { // Plain text node
Text textNode = (Text)node;
String text = textNode.getData().trim(); // Strip off space
if ((text != null) && text.length() > 0) // If non-empty
out.write(indent + fixup(text)+"\n"); // print text
break;
}
case Node.PROCESSING_INSTRUCTION_NODE: { // Handle PI nodes
ProcessingInstruction pi = (ProcessingInstruction)node;
out.write(indent + "<?" + pi.getTarget() + " " + pi.getData() + "?>\n");
break;
}
case Node.ENTITY_REFERENCE_NODE: { // Handle entities
out.write(indent + "&" + node.getNodeName() + ";\n");
break;
}
case Node.CDATA_SECTION_NODE: { // Output CDATA sections
CDATASection cdata = (CDATASection)node;
// Careful! Don't put a CDATA section in the program itself!
out.write(indent + "<" + "![CDATA[" + cdata.getData() +
"]]" + ">\n");
break;
}
case Node.COMMENT_NODE: { // Comments
Comment c = (Comment)node;
out.write(indent + "<!--" + c.getData() + "-->\n");
break;
}
default: // Hopefully, this won't happen too much!
System.err.println("Ignoring node: " + node.getClass().getName());
break;
}
}
// This method replaces reserved characters with entities.
private String fixup(String s) {
StringBuffer sb = new StringBuffer();
int len = s.length();
for(int i = 0; i < len; i++) {
char c = s.charAt(i);
switch(c) {
default: sb.append(c); break;
case '<': sb.append("<"); break;
case '>': sb.append(">"); break;
case '&': sb.append("&"); break;
case '"': sb.append("""); break;
case '\'': sb.append("'"); break;
}
}
return sb.toString();
}
// Extend the VOTable (docVO) header with FIELD declarations for the extra
// columns that will later be filled from the instrument query results.
// 'instrument' is the list of instrument response XML strings; 'prefix'
// (e.g. "hessi_") disambiguates the new column names.
private void addTableHeaders(List instrument, String prefix){
NodeList nodes=null;
StringReader reader;
InputSource source;
// Find the first instrument response that actually contains a <return>
// element; its child elements define the columns to add.
for(int i = 0; i < instrument.size(); i++) {
reader = new StringReader(instrument.get(i));
source = new InputSource(reader);
parser.parse(source);
doc=parser.getDocument();
nodes = doc.getElementsByTagName("return");
if(nodes!= null && nodes.getLength()>=1){
break;
} else {
reader.close();
}
}
NodeList nodesVO = docVO.getElementsByTagName("TABLE");
if(nodesVO.getLength() >= 1){
Node voTableNode = nodesVO.item(0);
if(nodes != null && nodes.getLength()>= 1){
Node observation = nodes.item(0);
NodeList obsChilds = observation.getChildNodes();
for(int i=0; i< obsChilds.getLength(); i++){
// One new FIELD per child of <return>, named prefix + local name.
Element voFieldNode = docVO.createElement("FIELD");
voFieldNode.setAttribute("name",prefix.concat(obsChilds.item(i).getLocalName()));
voFieldNode.setAttribute("datatype","char");
voFieldNode.setAttribute("arraysize","3400");
// Insert before the TABLE's last child so the FIELD declarations
// precede the trailing DATA element.
voTableNode.insertBefore(voFieldNode, voTableNode.getLastChild());
}
}
}
// NOTE(review): if 'instrument' is empty, 'reader' is never assigned and
// this close() relies on Beanshell's loose checking -- confirm the
// workflow always supplies at least one instrument response.
reader.close();
}
private writeExtraFields(Node nodeVO, Node nodeHessi) {
String debug = new String("");
NodeList listHessi = nodeHessi.getChildNodes();
for(int i=0; i< listHessi.getLength(); i++) {
Node newNode = nodeVO.getFirstChild().cloneNode(false);
Text voTextNode = docVO.createTextNode(listHessi.item(i).getFirstChild().getNodeValue());
newNode.appendChild(voTextNode);
nodeVO.appendChild(newNode);
}
}
// Walk the VOTable rows (one per GOES event) and, for every RHESSI /
// Phoenix2 observation pair whose time windows overlap, insert a copy of
// the row extended with both instruments' data. The original row is then
// removed, so events without overlapping coverage disappear from the table.
// Returns accumulated debug text (empty on the normal path).
// Fixes: 'dataListePhoenix' was an undefined variable (typo for
// 'dataListPhoenix'), and 'new NodeList()' attempted to instantiate an
// interface -- per the DOM spec getElementsByTagName never returns null,
// so both dead branches are replaced with a safe skip.
private String findOverlaps() {
NodeList nodesHessi=null;
NodeList nodesPhoenix=null;
StringReader reader;
StringReader readerPhoenix;
InputSource source;
InputSource sourcePhoenix;
String debug = new String("");
NodeList tableListVO = docVO.getElementsByTagName("TABLEDATA");
NodeList dataListVO = docVO.getElementsByTagName("TR");
if(dataListVO == null) {
debug="dataListVO = null";
return debug;
}
SimpleDateFormat date = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
// Both instrument result lists are produced from the same event list and
// must therefore be index-aligned.
if(instrument_data_hessi.size() != instrument_data_phoenix.size()) {
throw new Exception("data list lengths different -> not from same periodes");
}
// Iterate backwards so inserting/removing rows does not disturb the row
// indexes recorded in 'position'.
for(int i = instrument_data_hessi.size()-1; i>=0; i--) {
reader = new StringReader(instrument_data_hessi.get(i));
readerPhoenix = new StringReader(instrument_data_phoenix.get(i));
source = new InputSource(reader);
sourcePhoenix = new InputSource(readerPhoenix);
parser.parse(source);
parserPhoenix.parse(sourcePhoenix);
doc=parser.getDocument();
docPhoenix=parserPhoenix.getDocument();
NodeList dataListHessi = doc.getElementsByTagName("measurementStart");
NodeList dataListHessi2 = doc.getElementsByTagName("measurementEnd");
NodeList dataListPhoenix = docPhoenix.getElementsByTagName("measurementStart");
if(dataListHessi == null || dataListPhoenix == null) {
// Defensive only: the DOM API returns an empty list, never null.
reader.close();
readerPhoenix.close();
continue;
}
int pos = Integer.parseInt(position.get(i));
Node voDataNode = dataListVO.item(pos);
if(voDataNode == null)
{
reader.close();
readerPhoenix.close();
return debug;
}
Node parent = voDataNode.getParentNode();
for(int j = 0; j < dataListHessi.getLength(); j++) {
Date startHessi = date.parse(dataListHessi.item(j).getFirstChild().getNodeValue());
Date endHessi = date.parse(dataListHessi2.item(j).getFirstChild().getNodeValue());
for(int k = 0; k < dataListPhoenix.getLength(); k++) {
Date startPhoenix = date.parse(dataListPhoenix.item(k).getFirstChild().getNodeValue());
// Overlap test: the Phoenix2 observation starts inside the RHESSI window.
if(startHessi.getTime()<= startPhoenix.getTime() && endHessi.getTime()>=startPhoenix.getTime()){
Node voDataNodeCurrent = voDataNode.cloneNode(true);
writeExtraFields(voDataNodeCurrent, doc.getElementsByTagName("return").item(j));
writeExtraFields(voDataNodeCurrent, docPhoenix.getElementsByTagName("return").item(k));
parent.insertBefore(voDataNodeCurrent, voDataNode);
}
}
}
// Remove the original (un-extended) row.
parent.removeChild(voDataNode);
reader.close();
readerPhoenix.close();
}
return debug;
}
// ---- script body: parse the VOTable, extend its headers with the two
// instruments' columns, merge overlapping observations into the rows, and
// serialise the result back into the VOTable_out output port. ----
StringWriter out= new StringWriter();
StringReader reader2 = new StringReader(voTable);
InputSource source2 = new InputSource(reader2);
DOMParser parser = new DOMParser();
DOMParser parserPhoenix = new DOMParser();
DOMParser parser2 = new DOMParser();
VOTable_out = new String("");
Document docVO;
Document doc;
Document docPhoenix;
try {
parser2.parse(source2);
docVO = parser2.getDocument();
// Declare the extra FIELD columns for both instruments first, then fill
// the data rows for events with overlapping coverage.
this.addTableHeaders(instrument_data_hessi, "hessi_");
this.addTableHeaders(instrument_data_phoenix, "phoenix2_");
VOTable_out = VOTable_out.concat(this.findOverlaps());
// Serialise the modified DOM into 'out' and append it to the output.
this.write(docVO.getDocumentElement(),"");
VOTable_out = VOTable_out.concat(out.toString());
reader2.close();
}
catch (Exception e){
e.printStackTrace();
// Also surface the stack trace on the workflow output port so failures
// are visible to the workflow user, not only on stderr.
StackTraceElement[] stacktrace = e.getStackTrace();
for(int i =0 ; i< stacktrace.length; i++) {
VOTable_out = VOTable_out.concat(stacktrace[i].toString()+"\n");
}
}
|
sql_output |
xmlsplitter |
|
hessi_ec |
stringconstant |
Valuehessi-ec |
phoenix2 |
stringconstant |
Valuephoenix2 |
query_v1_phoenix2 |
wsdl |
Wsdlhttp://helio.i4ds.technik.fhnw.ch:8080/core/services/frontend?wsdlWsdl Operationquery_v1 |
query_v1_phoenix2_input |
xmlsplitter |
|
Beanshells (3)
|  |
Name |
Description |
Inputs |
Outputs |
combine_sql_query |
|
start_date
stop_date
sql_base
goes_min
goes_max
|
sql_string
|
getAllIventDates |
|
voTable
|
startDates
endDates
positions
|
combineData |
|
voTable
position
instrument_data_hessi
instrument_data_phoenix
|
VOTable_out
|
Outputs (1)
|  |
Datalinks (22)
|  |
Source |
Sink |
sql_input:output |
sql:parameters |
combine_sql_query:sql_string |
sql_input:arg0 |
date_end |
combine_sql_query:stop_date |
date_start |
combine_sql_query:start_date |
sql_base:value |
combine_sql_query:sql_base |
GOES_min |
combine_sql_query:goes_min |
GOES_max |
combine_sql_query:goes_max |
sql_output:return |
getAllIventDates:voTable |
query_v1_hessi_ec_input:output |
query_v1_hessi_ec:parameters |
hessi_ec:value |
query_v1_hessi_ec_input:instrument |
getAllIventDates:endDates |
query_v1_hessi_ec_input:date_to |
getAllIventDates:startDates |
query_v1_hessi_ec_input:date_from |
sql_output:return |
combineData:voTable |
query_v1_phoenix2:parameters |
combineData:instrument_data_phoenix |
query_v1_hessi_ec:parameters |
combineData:instrument_data_hessi |
getAllIventDates:positions |
combineData:position |
sql:parameters |
sql_output:input |
query_v1_phoenix2_input:output |
query_v1_phoenix2:parameters |
phoenix2:value |
query_v1_phoenix2_input:instrument |
getAllIventDates:endDates |
query_v1_phoenix2_input:date_to |
getAllIventDates:startDates |
query_v1_phoenix2_input:date_from |
combineData:VOTable_out |
VOTable |
Coordinations (0)
|  |
Uploader
License
All versions of this Workflow are
licensed under:
Version 1 (earliest)
(of 7)
Credits (1)
(People/Groups)
Attributions (0)
(Workflows/Files)
None
Shared with Groups (1)
Featured In Packs (0)
None
Log in to add to one of your Packs
Attributed By (1)
(Workflows/Files)
Favourited By (0)
No one
Statistics
Citations (0) | Version History
Other workflows that use similar services
(5)
Only the first 2 workflows that use similar services are shown. View all workflows that use these services.
DPAS general query
(1)
general query to DPAS; it gets a list of instruments and lists for start and stop times;
for a query hessiEC, phoenix2; start times 2001-01-01 00:00:00, 2005-04-01 00:00:00; stop times 2001-01-05 00:00:00, 2005-05-1 00:00:00 it would perform two queries:
hessiEC from 2001-01-01 00:00:00 to 2001-01-05 00:00:00 and phoenix2 from 2005-04-01 00:00:00 to 2005-05-1 00:00:00
There is only one output file with the results from both queries.
Created: 2010-04-19
| Last updated: 2010-04-19
Credits:
Anja Le Blanc
No comments yet
Log in to make a comment