% This file was created with JabRef 2.10. % Encoding: UTF8 @Conference{bastin2012tools, Title = {Tools for proactive collection and use of quality metadata in GEOSS}, Author = {Bastin, Lucy and Thum, Simon and Mas\'{o}, Joan and Yang, Kevin X and N{\"u}st, Daniel and Van den Broek, Maud and Lush, Victoria and Papeschi, Fabrizio and Riverola, Anna}, author+an = {5=highlight}, Booktitle = {AGU General Assembly}, Year = {2012}, Organization = {American Geophysical Union}, Pages = {03}, Volume = {1}, Abstract = {The GEOSS Common Infrastructure allows interactive evaluation and selection of Earth Observation datasets by the scientific community and decision makers, but the data quality information needed to assess fitness for use is often patchy and hard to visualise when comparing candidate datasets. In a number of studies over the past decade, users repeatedly identified the same types of gaps in quality metadata, specifying the need for enhancements such as peer and expert review, better traceability and provenance information, information on citations and usage of a dataset, warning about problems identified with a dataset and potential workarounds, and 'soft knowledge' from data producers (e.g. recommendations for use which are not easily encoded using the existing standards). Despite clear identification of these issues in a number of recommendations, the gaps persist in practice and are highlighted once more in our own, more recent, surveys. This continuing deficit may well be the result of a historic paucity of tools to support the easy documentation and continual review of dataset quality. 
However, more recent developments in tools and standards, as well as more general technological advances, present the opportunity for a community of scientific users to adopt a more proactive attitude by commenting on their uses of data, and for that feedback to be federated with more traditional and static forms of metadata, allowing a user to more accurately assess the suitability of a dataset for their own specific context and reliability thresholds. The EU FP7 GeoViQua project aims to develop this opportunity by adding data quality representations to the existing search and visualisation functionalities of the Geo Portal. Subsequently we will help to close the gap by providing tools to easily create quality information, and to permit user-friendly exploration of that information as the ultimate incentive for improved data quality documentation. Quality information is derived from producer metadata, from the data themselves, from validation of in-situ sensor data, from provenance information and from user feedback, and will be aggregated to produce clear and useful summaries of quality, including a GEO Label. GeoViQua's conceptual quality information models for users and producers are specifically described and illustrated in this presentation. These models (which have been encoded as XML schemas and can be accessed at http://schemas.geoviqua.org/) are designed to satisfy the identified user needs while remaining consistent with current standards such as ISO 19115 and advanced drafts such as ISO 19157. The resulting components being developed for the GEO Portal are designed to lower the entry barrier to users who wish to help to generate and explore rich and useful metadata. This metadata will include reviews, comments and ratings, reports of usage in specific domains and specification of datasets used for benchmarking, as well as rich quantitative information encoded in more traditional data quality elements such as thematic correctness and positional accuracy. 
The value of the enriched metadata will also be enhanced by graphical tools for visualizing spatially distributed uncertainties. We demonstrate practical example applications in selected environmental application domains.}, Comment = {Fall Meeting 2012, abstract #IN13D-03}, Journal = {AGU Fall Meeting Abstracts}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11} } @Article{s110807568, Title = {Semantically-Enabled Sensor Plug \& Play for the Sensor Web}, Author = {Br{\"o}ring, Arne and Mau\'{e}, Patrick and Janowicz, Krzysztof and N{\"u}st, Daniel and Malewski, Christian}, author+an = {4=highlight}, Journal = {Sensors}, Year = {2011}, Number = {8}, Pages = {7568--7605}, Volume = {11}, Abstract = {Environmental sensors have continuously improved by becoming smaller, cheaper, and more intelligent over the past years. As consequence of these technological advancements, sensors are increasingly deployed to monitor our environment. The large variety of available sensor types with often incompatible protocols complicates the integration of sensors into observing systems. The standardized Web service interfaces and data encodings defined within OGC’s Sensor Web Enablement (SWE) framework make sensors available over the Web and hide the heterogeneous sensor protocols from applications. So far, the SWE framework does not describe how to integrate sensors on-the-fly with minimal human intervention. The driver software which enables access to sensors has to be implemented and the measured sensor data has to be manually mapped to the SWE models. In this article we introduce a Sensor Plug & Play infrastructure for the Sensor Web by combining (1) semantic matchmaking functionality, (2) a publish/subscribe mechanism underlying the SensorWeb, as well as (3) a model for the declarative description of sensor interfaces which serves as a generic driver mechanism. We implement and evaluate our approach by applying it to an oil spill scenario. 
The matchmaking is realized using existing ontologies and reasoning engines and provides a strong case for the semantic integration capabilities provided by Semantic Web research.}, Doi = {10.3390/s110807568}, ISSN = {1424-8220}, Keywords = {Sensor Web Enablement, Sensor Plug \& Play, sensor integration, Semantic Web, sensor bus, sensor interface descriptors, semantic matching, ontology alignment}, Owner = {Daniel}, Timestamp = {2011.08.02}, Url = {http://www.mdpi.com/1424-8220/11/8/7568/} } @Conference{demuth2013airquality, Title = {The AirQuality SenseBox}, Author = {Demuth, Dustin and N{\"u}st, Daniel and Br{\"o}ring, Arne and Pebesma, Edzer}, author+an = {2=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2013}, Pages = {5146}, Volume = {15}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://meetingorganizer.copernicus.org/EGU2013/EGU2013-5146.pdf} } @InProceedings{foerster2012mobile, Title = {{Discovering the Sensor Web through Mobile Applications}}, Author = {Foerster, Theodor and N{\"u}st, Daniel and Br{\"o}ring, Arne and Jirka, Simon and Trajkovski, Klemen Kozmus and Petrovi{\v{c}}, Du{\v{s}}an and Sterle, Oskar}, author+an = {2=highlight}, Booktitle = {Advances in Location-Based Services}, Year = {2012}, Organization = {8th International Symposium on Location-Based Services}, Pages = {211--224}, Series = {Springer Lecture Notes in Geoinformation and Cartography}, Doi = {10.1007/978-3-642-24198-7_14}, Journal = {Advances in Location-Based Services}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://link.springer.com/book/10.1007/978-3-642-24198-7#page=211} } @InProceedings{hinz2013, Title = {{Spatial Statistics on the Geospatial Web}}, Author = {Hinz, Matthias and N{\"u}st, Daniel and Pro{\ss}, Benjamin and Pebesma, Edzer}, author+an = {2=highlight}, Booktitle = {The 16th AGILE International Conference on Geographic Information Science, Short Papers}, Year = {2013}, Abstract = {The Geospatial 
Web provides data as well as processing functionality using web interfaces. Typical examples of such processes are models and predictions for spatial data, known as spatial statistics. Such analyses are written by domain experts in scripting languages and rarely exposed as web services. We present a concept of script annotations for automatic deployment in server runtime environments and demonstrate it with an implementation based on the open standards and open source components OGC Web Processing Service and R.}, Journal = {AGILE}, Keywords = {Geoprocessing, R, spatial statistics, WPS, Geospatial Web}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.agile-online.org/Conference_Paper/CDs/agile_2013/Short_Papers/SP_S3.1_Hinz.pdf} } @Article{Hofer2015, Title = {Descriptions of Spatial Operations – Recent Approaches and Community Feedback}, Author = {Hofer, Barbara and Brauner, Johannes and Jackson, Mike and Granell, Carlos and Rodrigues, Armanda and N{\"u}st, Daniel and Wiemann, Stefan}, author+an = {6=highlight}, Journal = {International Journal of Spatial Data Infrastructures Research}, Year = {2015}, Pages = {124--137}, Volume = {10}, Abstract = {Progress in the technical provision of spatial operations as loosely-coupled interoperable web services requires a corresponding development of standardisation in their description. Operation discovery, usage and interpretation of results require more information on what a spatial operation does than just their input and output interface specifications. Geooperators and WPS profiles have been proposed for addressing operation descriptions for different operational perspectives. Geooperators have been developed mostly for supporting operation discovery through defining alternative perspectives such as a geodata, legacy GIS, formal or technical perspective. These act as filters in the discovery process. 
WPS profiles provide a hierarchical approach to define the concept underlying an operation and, in more specific profiles, the syntactic interface of the operation. Both approaches require community engagement for reaching an agreed set of documented operations. We report on a discussion of these approaches and the larger framework of a geoprocessing community platform from a workshop held at the AGILE International Conference on Geographic Information Science in Lisbon in 2015. At the workshop two presentations provided insights in different contexts of use of online geoprocessing. After detailed introductions to the two operation descriptions approaches, two breakout sessions were held. In the breakout sessions operation descriptions and technical developments in the field were discussed. This article summarizes the discussion that took place at the workshop with the intention to involve the extended community in the discourse on operation descriptions.}, Doi = {10.2902/1725-0463.2015.10.art6}, Owner = {daniel}, Timestamp = {2016.02.05}, Url = {http://ijsdir.jrc.ec.europa.eu/index.php/ijsdir/article/view/388} } @Book{Houbie2009, Title = {{OGC Discussion Paper 09-163 - OGC Catalogue Services Specification 2.0 - Extension Package for ebRIM Application Profile: SensorML}}, Author = {Houbie, F. and Skivee, F. and Robin, A. 
and Jirka, Simon and Br{\"o}ring, Arne and N{\"u}st, Daniel}, author+an = {6=highlight}, Publisher = {Open Geospatial Consortium}, Year = {2009}, Timestamp = {2009.04.12}, Url = {http://portal.opengeospatial.org/files/?artifact_id=37944} } @Book{JirkaSOR2010, Title = {{OGC Discussion Paper 09-112r1 - Sensor Observable Registry}}, Author = {Jirka, Simon and Br{\"o}ring, Arne and N{\"u}st, Daniel}, author+an = {3=highlight}, Publisher = {Open Geospatial Consortium}, Year = {2010}, Owner = {daniel}, Timestamp = {2009.07.30}, Url = {http://portal.opengeospatial.org/files/?artifact_id=37944} } @Book{JirkaSIR2010, Title = {{OGC Discussion Paper 10-171: Sensor Instance Registry}}, Author = {Jirka, Simon and N{\"u}st, Daniel}, author+an = {2=highlight}, Publisher = {Open Geospatial Consortium}, Year = {2010}, Owner = {daniel}, Timestamp = {2009.04.08}, Url = {http://portal.opengeospatial.org/files/?artifact_id=40609} } @Conference{Jirka2013, Title = {{Sensor Web and Web Processing for Crisis Management}}, Author = {Jirka, Simon and N{\"u}st, Daniel and Pro{\ss}, Benjamin}, author+an = {2=highlight}, Booktitle = {Proceedings of the 10th International ISCRAM Conference}, Year = {2013}, Address = {Baden-Baden, Germany}, Editor = {T. Comes and F. Fiedrich and S. Fortier and J. Geldermann and T. M{\"u}ller}, Month = may, Organization = {ISCRAM}, Pages = {376--380}, Abstract = {This paper introduces the latest state of the interoperable Sensor Web and Web Processing standards specified by the Open Geospatial Consortium. Based on these components it becomes possible to share, integrate and analyse observation data across political and administrative borders as well as across multiple thematic domains. We present the 52°North open source implementations of the OGC SWE and WPS standards and introduce an outlook how this technology could be applied in the field of crisis management. 
Thus, this paper aims at providing a perspective how currently existing technology can be combined and applied to solve problems in emergency management rather than describing an already finished product. Special consideration will be given to the combination of Sensor Web and Web Processing technology which opens up new possibilities by having near real-time data flows that can be linked on-demand to different processing services.}, Keywords = {Interoperability, Sensor Web, OGC Sensor Web Enablement, OGC Web Processing Service, abstract}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.iscramlive.org/ISCRAM2013/files/271.pdf} } @InProceedings{Jirka2010, Title = {{Integrating the OGC Sensor Web Enablement Framework into the OGC Catalogue}}, Author = {Jirka, Simon and N{\"u}st, Daniel and Schulte, Jan and Houbie, Frederic}, author+an = {2=highlight}, Booktitle = {{WebMGS 2010: 1st International Workshop on Pervasive Web Mapping, Geoprocessing and Services. 26.-27. August 2010. Como, Italy.}}, Year = {2010}, Abstract = {The Sensor Web Enablement (SWE) architecture of the Open Geospatial Consortium (OGC) has reached a broad acceptance. However, although the core specifications reached a mature state, the integration of a cataloguing service for sensors has not yet been achieved. Previous work focused on registries that address the specifics of dynamic sensor networks, on mechanisms for handling the semantics of phenomena and on metadata models based on the Sensor Model Language (SensorML). This work describes how existing elements supporting sensor discovery can be coupled with the already well established OGC Catalogue Service (CSW). The approach presented in this work relies on a SensorML profile specifying metadata necessary and sufficient for sensor discovery. 
SensorML documents that conform to the profile are automatically harvested from SWE services by a lower level registry and are subsequently transformed into an information model supported by the CSW. Finally the metadata is pushed into CSW instances and becomes available through the CSW interface. In summary, this work presents for the first time a working example how resources provided through SWE services can automatically be published through common OGC Catalogue Service instances. We expect that the presented approach is an important step in order to achieve a full integration of SWE components into spatial data infrastructures and to offer SWE services to a broader audience.}, Owner = {Daniel}, Timestamp = {2011.05.09}, Url = {http://www.isprs.org/proceedings/XXXVIII/4%2DW13/ID_13.pdf} } @Conference{Jirka2015, Title = {Applying Sensor Web Technology to Marine Sensor Data}, Author = {Jirka, Simon and del Rio, Joaquin and Toma, Daniel Mihai and N{\"u}st, Daniel and Stasch, Christoph and Delory, Eric}, author+an = {4=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {9069}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {In this contribution we present two activities illustrating how Sensor Web technology helps to enable a flexible and interoperable sharing of marine observation data based on standards. An important foundation is the Sensor Web Architecture developed by the European FP7 project NeXOS (Next generation Low-Cost Multifunctional Web Enabled Ocean Sensor Systems Empowering Marine, Maritime and Fisheries Management). This architecture relies on the Open Geospatial Consortium's (OGC) Sensor Web Enablement (SWE) framework. It is an exemplary solution for facilitating the interoperable exchange of marine observation data within and between (research) organisations. 
The architecture addresses a series of functional and non-functional requirements which are fulfilled through different types of OGC SWE components. The diverse functionalities offered by the NeXOS Sensor Web architecture are shown in the following overview: - Pull-based observation data download: This is achieved through the OGC Sensor Observation Service (SOS) 2.0 interface standard. - Push-based delivery of observation data to allow users the subscription to new measurements that are rel- evant for them: For this purpose there are currently several specification activities under evaluation (e.g. OGC Sensor Event Service, OGC Publish/Subscribe Standards Working Group). - (Web-based) visualisation of marine observation data: Implemented through SOS client applications. - Configuration and controlling of sensor devices: This is ensured through the OGC Sensor Planning Ser- vice 2.0 interface. - Bridging between sensors/data loggers and Sensor Web components: For this purpose several components such as the "Smart Electronic Interface for Sensor Interoperability" (SEISI) concept are developed; this is com- plemented by a more lightweight SOS extension (e.g. based on the W3C Efficient XML Interchange (EXI) format). To further advance this architecture, there is on-going work to develop dedicated profiles of selected OGC SWE specifications that provide stricter guidance how these standards shall be applied to marine data (e.g. SensorML 2.0 profiles stating which metadata elements are mandatory building upon the ESONET Sensor Registry developments, etc.). Within the NeXOS project the presented architecture is implemented as a set of open source components. These implementations can be re-used by all interested scientists and data providers needing tools for publishing or consuming oceanographic sensor data. 
In further projects such as the European project FixO3 (Fixed-point Open Ocean Observatories), these soft- ware development activities are complemented with additional efforts to provide guidance how Sensor Web technology can be applied in an efficient manner. This way, not only software components are made available but also documentation and information resources that help to understand which types of Sensor Web deployments are best suited to fulfil different types of user requirements.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-9069.pdf} } @Conference{lush2014geo, Title = {{GEO Label Web Services for Dynamic and Effective Communication of Geospatial Metadata Quality}}, Author = {Lush, Victoria and N{\"u}st, Daniel and Bastin, Lucy and Mas{\'o}, Joan and Lumsden, Jo}, author+an = {2=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2014}, Pages = {14453}, Volume = {16}, Abstract = {We present demonstrations of the GEO label Web services and their integration into a proto- type extension of the GEOSS portal (http://scgeoviqua.sapienzaconsulting.com/web/guest/geo_home), the GMU portal (http://gis.csiss.gmu.edu/GADMFS/) and a GeoNetwork catalog application (http://uncertdata.aston.ac.uk:8080/geonetwork/srv/eng/main.home). The GEO label is designed to commu- nicate, and facilitate interrogation of, geospatial quality information with a view to supporting efficient and effective dataset selection on the basis of quality, trustworthiness and fitness for use. The GEO label which we propose was developed and evaluated according to a user-centred design (UCD) approach in order to maximise the likelihood of user acceptance once deployed. 
The resulting label is dynamically generated from producer metadata in ISO or FDGC format, and incorporates user feedback on dataset usage, ratings and discovered issues, in order to supply a highly informative summary of metadata completeness and quality. The label was easily incorporated into a community portal as part of the GEO Architecture Implementation Programme (AIP-6) and has been successfully integrated into a prototype extension of the GEOSS portal, as well as the popular metadata catalog and editor, GeoNetwork. The design of the GEO label was based on 4 user studies conducted to: (1) elicit initial user requirements; (2) investigate initial user views on the concept of a GEO label and its potential role; (3) evaluate prototype label visualizations; and (4) evaluate and validate physical GEO label prototypes. The results of these studies indicated that users and producers support the concept of a label with drill-down interrogation facility, combining eight geospatial data informational aspects, namely: producer profile, producer comments, lineage information, standards compliance, quality information, user feedback, expert reviews, and citations information. These are delivered as eight facets of a wheel-like label, which are coloured according to metadata availability and are clickable to allow a user to engage with the original metadata and explore specific aspects in more detail. To support this graphical representation and allow for wider deployment architectures we have implemented two Web services, a PHP and a Java implementation, that generate GEO label representations by combining producer metadata (from standard catalogues or other published locations) with structured user feedback. Both services accept encoded URLs of publicly available metadata documents or metadata XML files as HTTP POST and GET requests and apply XPath and XSLT mappings to transform producer and feedback XML documents into clickable SVG GEO label representations. 
The label and services are underpinned by two XML-based quality models. The first is a producer model that extends ISO 19115 and 19157 to allow fuller citation of reference data, presentation of pixel- and dataset- level statistical quality information, and encoding of traceability information on the lineage of an actual quality assessment. The second is a user quality model (realised as a feedback server and client) which allows reporting and query of ratings, usage reports, citations, comments and other domain knowledge. Both services are Open Source and are available on GitHub at https://github.com/lushv/geolabel- service and https://github.com/52North/GEO-label-java. The functionality of these services can be tested using our GEO label generation demos, available online at http://www.geolabel.net/demo.html and http://geoviqua.dev.52north.org/glbservice/index.jsf.}, Keywords = {abstract}, Owner = {daniel}, Timestamp = {2014.10.11}, Url = {http://meetingorganizer.copernicus.org/EGU2014/EGU2014-14453.pdf} } @Conference{MasoPau2013, Title = {{Eliciting Well-Formed Quality Indicators And Metadata In GEOSS Earth Observation Products}}, Author = {Mas\'{o} Pau, Joan and Sevillano, Eva and Bastin, Lucy and Blower, Jon and Smeets, Joost and N{\"u}st, Daniel and Bigagli, Lorenzo and Thum, Simon and Guidetti, Veronica and Evano, Pascal and Alameh, Nadine}, author+an = {6=highlight}, Booktitle = {ESA Living Planet Symposium}, Year = {2013}, Month = {9}, Organization = {ESA}, Keywords = {presentation}, Owner = {daniel}, Timestamp = {2014.10.11} } @Conference{Maso2014, Title = {{Including data quality concepts into the GEOSS Portal}}, Author = {Mas\'{o}, Joan and N{\"u}st, Daniel and Diaz, Daniel and Closa, Guillem and Lush, Victoria and Zabala, Alaitz and Nativi, Stefano}, author+an = {2=highlight}, Booktitle = {{Poster Session ESSI2.12: Real Use of Standards and Technologies - Live Demonstrations}}, Year = {2014}, Editor = {John van de Vegte and David Arctur and George 
Percivall and Joan Mas\'{o}}, Number = {EGU2014-15739}, Organization = {European Geosciences Union}, Volume = {16}, Abstract = {The EC FP7 GeoViQua project has focussed in including quality information into the GEO Portal to increase the visibility of it and increase trust in the GEOSS data products. GeoViQua has fully adopted and extended the ISO19115 metadata model and the XML encoding, incorporating the ISO19157 concepts as new XML elements (pending the official XML encoding) but adding new elements detected as missing aspects in the model. This makes possible an easy transition and coexistence of both models in the GEO portal. The GEOSS Portal enables the user to discover data introducing keywords and other filtering criteria. Once the list of results is returned, the user can read more about each result based on the metadata available in the GEOSS catalogues. In GeoViQua, we developed several metadata-based visualization tools through applying an XSLT allowing specialized visualization focused in different aspects. A provenance-based visualization tool emphasising the sequence of processes and data sources and its detailed description, has been integrated. It is also possible to assess the completeness of the metadata records using an extension of an initially developed by NOAA tool that is now enriched with quality indicators assessment. A metadata comparison tool is able to present metadata document side-by-side allowing parallel reading of the different metadata tags. Furthermore, numerical values are automatically compared and the best one is illuminated in green background colour, and a star plot and a parallel coordinates plot are used to easily show a graph with the best dataset as the large polygon and the highest line. Each summary result combines both producer metadata with the possibility for the user to submit feedback consisting in ratings, comments, expert reviews, links to documentation and new quality indicators. 
Previous feedback items, coming from other users, are shown next to the summary producer metadata. Also both the summaries and extended descriptions of each result are accompanied by a GEO label (i.e. a graphical representation of the presence and content of some aspects of the metadata that data users interviewed by the project found important for them). Additionally, a special icon indicates the presence, and the possibility of showing in a WMS client, data values combined with other pixel level quality estimators in a single view (e.g. the mean and the variance of a measure that has a spatial distribution). The research leading to these results has received funding from the European Union Seventh Framework Programme (FP7/2010-2013) under grant agreement no. 265178.}, Keywords = {abstract}, Owner = {daniel}, Timestamp = {2014.10.11}, Url = {http://meetingorganizer.copernicus.org/EGU2014/EGU2014-15739.pdf} } @Conference{Maso2015, Title = {Towards the creation of a European Network of Earth Observation Networks within GEO. The ConnectinGEO project.}, Author = {Mas\'{o}, Joan and Serral, Ivette and Menard, Lionel and Wald, Lucien and Nativi, Stefano and Plag, Hans-Peter and Jules-Plag, Shelley and N{\"u}st, Daniel and Jirka, Simon and Pearlman, Jay and De Maziere, Martine}, author+an = {8=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {13792}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {ConnectinGEO (Coordinating an Observation Network of Networks EnCompassing saTellite and IN-situ to fill the Gaps in European Observations) is a new H2020 Coordination and Support Action with the primary goal of linking existing Earth Observation networks with science and technology (S\&T) communities, the industry sector, the Group on Earth Observations (GEO), and Copernicus. 
ConnectinGEO aims to facilitate a broader and more accessible knowledge base to support the needs of GEO, its Societal Benefit Areas (SBAs) and the users of the Global Earth Observing System of Systems (GEOSS). A broad range of subjects from climate, natural resources and raw materials, to the emerging UN Sustainable Development Goals (SDGs) will be addressed. The project will generate a prioritized list of critical gaps within available observation data and models to translate observations into practice-relevant knowledge, based on stakeholder consultation and systematic analysis. Ultimately, it will increase coherency of European observation networks, increase the use of Earth observations for assessments and forecasts and inform the planning for future observation systems. ConnectinGEO will initiate a European Network of Earth Observation Networks (ENEON) that will en- compass space-based, airborne and in-situ observations networks. ENEON will be composed by project partners representing thematic observation networks along with the GEOSS Science and Technology Stakeholder Network, GEO Communities of Practices, Copernicus services, Sentinel missions and in-situ support data representatives, representatives of the space-based, airborne and in-situ observations European networks (e.g. EPOS, EMSO and GROOM, etc), representatives of the industry sector and European and national funding agencies, in particular those participating in the future ERA-PlaNET. At the beginning, the ENEON will be created and managed by the project. Then the management will be transferred to the network itself to ensure its future continuity. ConnectinGEO's main goal in ENEON is to mature a consultation complemented by a systematic analysis of available data and metadata, which will draw for the first time a coherent picture of the variety of used data interfaces, policies and indicators. 
This way, the project will stimulate a harmonized and coherent coverage of the European EO networks, reemphasizing the political strategic targets, create opportunities for SMEs to develop products based on the current networks, and open avenues for industry to participate in investments addressing the identified high-priority gaps. The project starts in February 2015 and will last two years. We will present the five threads of the project for gap analysis in the Earth observation networks: global requirements and goals, international research programs, consultation process, systematic analysis of existing data platforms and industry challenges. The presentation will provide both an overview of the network concepts and approaches and discuss participation of the broader scientific community of data providers and users.}, Keywords = {abstract}, Owner = {daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-13792.pdf} } @Conference{Menard2015, Title = {Open Surface Solar Irradiance Observations - A Challenge}, Author = {Menard, Lionel and N{\"u}st, Daniel and Jirka, Simon and Mas\'{o}, Joan and Ranchin, Thierry and Wald, Lucien}, author+an = {2=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {6607}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {The newly started project ConnectinGEO funded by the European Commission aims at improving the understanding on which environmental observations are currently available in Europe and subsequently providing an informational basis to close gaps in diverse observation networks. The project complements supporting actions and networking activities with practical challenges to test and improve the procedures and methods for identifying observation data gaps, and to ensure viability in real world scenarios. 
We present a challenge on future concepts for building a data sharing portal for the solar energy industry as well as the state of the art in the domain. Decision makers and project developers of solar power plants have identified the Surface Solar Irradiance (SSI) and its components as an important factor for their business development. SSI observations are crucial in the process of selecting suitable locations for building new plants. Since in-situ pyranometric stations form a sparse network, the search for locations starts with global satellite data and is followed by the deployment of in-situ sensors in selected areas for at least one year. To form a convincing picture, answers must be sought in the conjunction of these EO systems, and although companies collecting SSI observations are willing to share this information, the means to exchange in-situ measurements across companies and between stakeholders in the market are still missing. We present a solution for interoperable exchange of SSI data comprising in-situ time-series observations as well as sensor descriptions based on practical experiences from other domains. More concretely, we will apply concepts and implementations of the Sensor Web Enablement (SWE) framework of the Open Geospatial Consortium (OGC). The work is based on an existing spatial data infrastructure (SDI), which currently comprises metadata, maps and coverage data, but no in-situ observations yet. This catalogue is already registered in the GEOSS Common Infrastructure (GCI). We describe the challenges and approach to introduce a suite of standards and best practices into the GEO Energy Societal Benefit Area for solar radiation measurements. Challenges range from spatio-temporal coverage across different scales and data quality to intellectual property rights and existing terminology. The approach includes means to share observations based on standardized data and metadata models and a user-friendly data exploration/management tool. 
The possibility to access and share data considerably improves the information base for strategic planning and control of new solar power resources. The platform will be integrated as a new component into the Webservice-Energy.org GEOSS Community Portal dedicated to Energy and Environment. The ability to provide users with visualisation and download features for in-situ measurements is seen as a key aspect to start engaging the energy community to share, release and integrate more in-situ measurements. This will put to the test the capacity of cooperation in the SSI community by introducing an unprecedented level of collaboration and eventually help to detect gaps in European earth observation networks. The presentation will be an opportunity to seek further collaboration partners and feedback by the community.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-6607.pdf} } @Article{menard_interoperable_2015, Title = {Interoperable {Exchange} of {Surface} {Solar} {Irradiance} {Observations}: {A} {Challenge}}, Author = {Menard, Lionel and N{\"u}st, Daniel and Ngo, Khai-Minh and Blanc, Philippe and Jirka, Simon and Mas\'{o} Pau, Joan and Ranchin, Thierry and Wald, Lucien}, author+an = {2=highlight}, Journal = {Energy Procedia}, Year = {2015}, Month = aug, Pages = {113--120}, Volume = {76}, Abstract = {We present how implementations of the Sensor Web Enablement framework of the Open Geospatial Consortium are integrated into an existing spatial data infrastructure. The result is registered as a community portal for professionals in solar energy in the GEOSS Common Infrastructure, demonstrating the benefits of interoperable exchange of in-situ time-series observations of surface solar irradiation. Easy access to, and sharing of data improves the information base for planning and monitoring of solar power resources. 
Providing users with visualization and download functionality for in-situ measurements is a key aspect for engaging the energy community to share, release and integrate in-situ measurements.}, Doi = {10.1016/j.egypro.2015.07.867}, ISSN = {1876-6102}, Keywords = {GEOSS, open data, Sensor web enablement, solar energy, Spatial data infrastructure, SSI, surface solar irradiance, web}, Owner = {daniel}, Series = {European {Geosciences} {Union} {General} {Assembly} 2015 - {Division} {Energy}, {Resources} and {Environment}, {EGU} 2015}, Shorttitle = {Interoperable {Exchange} of {Surface} {Solar} {Irradiance} {Observations}}, Timestamp = {2015.10.15}, Url = {http://www.sciencedirect.com/science/article/pii/S1876610215016434} } @Conference{Nuest2016, Title = {Opening Reproducible Research}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Booktitle = {DASPOS Workshop on Container Strategies for Data and Software Preservation that Promote Open Science}, Year = {2016}, Month = {5}, Organization = {University of Notre Dame}, Comment = {https://daspos.crc.nd.edu/index.php/14-daspos/workshops/55-workshop-7speak#dnust}, Keywords = {abstract}, Owner = {daniel}, Timestamp = {2016.06.04}, Url = {https://osf.io/h2u6w/} } @Conference{Nuest2015, Title = {A case for user-generated sensor metadata}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {5134}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {Cheap and easy to use sensing technology and new developments in ICT towards a global network of sensors and actuators promise previously unthought of changes for our understanding of the environment. Large professional as well as amateur sensor networks exist, and they are used for specific yet diverse applications across domains such as hydrology, meteorology or early warning systems. 
However the impact this ``abundance of sensors'' had so far is somewhat disappointing. There is a gap between (community-driven) sensor networks that could provide very useful data and the users of the data. In our presentation, we argue this is due to a lack of metadata which allows determining the fitness of use of a dataset. Syntactic or semantic interoperability for sensor webs have made great progress and continue to be an active field of research, yet they often are quite complex, which is of course due to the complexity of the problem at hand. But still, we see the most generic information to determine fitness for use is a dataset's provenance, because it allows users to make up their own minds independently from existing classification schemes for data quality. In this work we will make the case how curated user-contributed metadata has the potential to improve this situation. This especially applies for scenarios in which an observed property is applicable in different domains, and for set-ups where the understanding about metadata concepts and (meta-)data quality differs between data provider and user. On the one hand a citizen does not understand the ISO provenance metadata. On the other hand a researcher might find issues in publicly accessible time series published by citizens, which the latter might not be aware of or care about. Because users will have to determine fitness for use for each application on their own anyway, we suggest an online collaboration platform for user-generated metadata based on an extremely simplified data model. In the most basic fashion, metadata generated by users can be boiled down to a basic property of the world wide web: many information items, such as news or blog posts, allow users to create comments and rate the content. Therefore we argue to focus a core data model on one text field for a textual comment, one optional numerical field for a rating, and a resolvable identifier for the dataset that is commented on. 
We present a conceptual framework that integrates user comments in existing standards and relevant applications of online sensor networks and discuss possible approaches, such as linked data, brokering, or standalone metadata portals. We relate this framework to existing work in user generated content, such as proprietary rating systems on commercial websites, microformats, the GeoViQua User Quality Model, the CHARMe annotations, or W3C Open Annotation. These systems are also explored for commonalities and based on their very useful concepts and ideas; we present an outline for future extensions of the minimal model. Building on this framework we present a concept how a simplistic comment-rating-system can be extended to capture provenance information for spatio-temporal observations in the sensor web, and how this framework can be evaluated.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-5134.pdf} } @Conference{Nuest2013, Title = {{Web Processing Standards and Application Patterns}}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Booktitle = {4th Workshop on the use of GIS/OGC standards in meteorology, session 4: Application of standards}, Year = {2013}, Month = {3}, Organization = {ECMWF}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://old.ecmwf.int/newsevents/meetings/workshops/2013/GIS-OGC_standards/Presentations/pdfs/Nuest.pdf} } @Conference{Nuest2013a, Title = {{Visualising Interpolations of Mobile Sensor Observations}}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Booktitle = {GeoViz Hamburg}, Year = {2013}, Abstract = {Technical advances in mobile in-situ sensors open up new opportunities for providing an unprecedented amount of live environmental sensor data and allow measuring the same phenomenon at different times and places. 
If a phenomenon's characteristics allow (and often even if they do not), spatial interpolation of potentially sparse measurements is used to infer values at not-probed locations and thus increasing spatial coverage. However, this coverage can misleadingly suggest a non-existing level of detail. Interpolation techniques often assume the measurements as snapshot of one point in time. But having a moving sensor one must consider the temporal and spatial variation of both sensor and the spatially distributed phenomenon as the age of a measurement has an impact on the interpretability of a value. This work develops methods combining the visualisation of sensor platforms and interpolations based upon the sensor's observations. The goal is to increase the understanding of the observed dynamic environmental properties by creating a specialised information product from sensor data. The methods utilize explorative data analysis (Andrienko and Andrienko, 2006), i.e. they leverage the user's experience and vision capabilities in combination with interactive parameters to create multiple critical views on the data supporting a trial \& error process. We conclude that a concurrent interactive specialised visualisation is a reasonable first step to provide the user with an idea of contextual uncertainty for interpolations based on mobile sensors. 
While existing research and visualisations cover rather large stationary sensor networks, the approach of "few but mobile" sensors requires further work not only in the fields of hardware and communication, but also in time-aware presentations.}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.geomatik-hamburg.de/geoviz/program.html} } @Conference{Nuest2012a, Title = {{WPS Application Patterns}}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Booktitle = {Workshop: Models for scientific exploitation of EO Data}, Year = {2012}, Organization = {HMA Architecture Working Group}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {https://wiki.services.eoportal.org/tiki-index.php?page=Models+for+scientific+exploitation+of+EO+Data} } @Manual{nust2012accessing, Title = {{Accessing Data from Sensor Observation Services: the sos4R Package}}, Author = {N{\"u}st, Daniel}, author+an = {1=highlight}, Year = {2012}, Abstract = {The sos4R package provides simple yet powerful access to OGC Sensor Observation Service instances. The package supports both encapsulation and abstraction from the service interface for novice users as well as powerful request building for specialists. sos4R is motivated by the idea to close the gap between the Sensor Web and tools for (geo-)statistical analyses. It implements the core profile of the SOS specification and supports temporal, spatial, and thematical filtering of observations. This document briefly introduces the SOS specification. The package's features are explained extensively: exploration of service metadata, request building with filters, function exchangeability, result data transformation. 
The package is published under GPL 2 license within the geostatistics community of 52\degree North Initiative for Geospatial Open Source Software.}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://cran.r-project.org/web/packages/sos4R/vignettes/sos4R.pdf} } @InProceedings{Nust2010, Title = {{Visualizing the Availability of Temporally-Structured Sensor Data}}, Author = {N{\"u}st, Daniel and Bache, Felix and Br{\"o}ring, Arne and Stasch, Christoph and Jirka, Simon}, author+an = {1=highlight}, Booktitle = {AGILE 2010: The 13th AGILE International Conference on Geographic Information Science, Short Papers}, Year = {2010}, Address = {Guimaraes, Portugal}, Editor = {Marco Painho and Maribel Yasmina Santos and Hardy Pundt}, Month = {5}, Owner = {Daniel}, Timestamp = {2011.03.07}, Url = {http://www.agile-online.org/Conference_Paper/CDs/agile_2010/ShortPapers_PDF%5C103_DOC.pdf} } @Conference{Nuest2014a, Title = {{Open Source and GitHub for Teaching with Software Development Projects}}, Author = {N{\"u}st, Daniel and Bartoschek, Thomas}, author+an = {1=highlight}, Booktitle = {Presentations at FOSS4G-Europe, Bremen}, Year = {2014}, Month = {7}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.slideshare.net/nuest/2014-foss4-geosgithubteachingdanielnuest} } @Conference{NuestEGU2016, Title = {Opening Reproducible Research}, Author = {N{\"u}st, Daniel and Konkol, Markus and Pebesma, Edzer and Kray, Christian and Kl{\"o}tgen, Stephanie and Schutzeichel, Marc and Lorenz, J{\"o}rg and Przibytzin, Holger and Kussmann, Dirk}, author+an = {1=highlight}, Booktitle = {EGU Geophysical Research Abstracts}, Year = {2016}, Organization = {European Geophysical Union}, Pages = {7396}, Series = {Geophysical Research Abstracts}, Volume = {18}, Abstract = {Open access is not only a form of publishing such that research papers become available to the large public free of charge, it also refers to a trend in science that the act of doing research 
becomes more open and transparent. When science transforms to open access we not only mean access to papers, research data being collected, or data being generated, but also access to the data used and the procedures carried out in the research paper. Increasingly, scientific results are generated by numerical manipulation of data that were already collected, and may involve simulation experiments that are completely carried out computationally. Reproducibility of research findings, the ability to repeat experimental procedures and confirm previously found results, is at the heart of the scientific method (Pebesma, Nüst and Bivand, 2012). As opposed to the collection of experimental data in labs or nature, computational experiments lend themselves very well for reproduction. Some of the reasons why scientists do not publish data and computational procedures that allow reproduction will be hard to change, e.g. privacy concerns in the data, fear for embarrassment or of losing a competitive advantage. Other reasons however involve technical aspects, and include the lack of standard procedures to publish such information and the lack of benefits after publishing them. We aim to resolve these two technical aspects. We propose a system that supports the evolution of scientific publications from static papers into dynamic, executable research documents. The DFG-funded experimental project Opening Reproducible Research (ORR) aims for the main aspects of open access, by improving the exchange of, by facilitating productive access to, and by simplifying reuse of research results that are published over the Internet. Central to the project is a new form for creating and providing research results, the executable research compendium (ERC), which not only enables third parties to reproduce the original research and hence recreate the original research results (figures, tables), but also facilitates interaction with them as well as their recombination with new data or methods. 
Building on existing open standards and software, this project develops standards and tools for ERCs, and will demonstrate and evaluate these, focusing on the geosciences domains. The project goes beyond a technical solution for ERCs by evaluating the system from the perspectives of geoscience researchers as participants in a scientific publication process. It will focus on the statistical environment R, but also evaluate larger run time systems captured in virtual environments (Docker containers). ERCs are built upon and integrate well with both established day-to-day workflows of digital research and the scientific publication process. They make research accessible on different levels at any stage to anyone via open web platforms. Other scientists can transfer a compendium of software and tools to their own local environment and collaborate, while others make minimal changes and compare changed results in a web browser. Building on recent advances in mainstream IT, ORR envisions a new architecture for storing, executing and interacting with the original analysis environment alongside the corresponding research data and text. ORR bridges the gap between long-term archives, practical geoscience researchers, as well as publication media. Consequently, the project team seeks input and feedback from researchers working with geospatial data to ensure usable and useful open access publications as well as a publication process that minimizes effort while maximizing usability and re-usability. References Pebesma, E., D. Nüst, R. Bivand, 2012. The R software environment in reproducible geoscientific research. Eos, Transactions American Geophysical Union 93, vol 16, p. 163-164. http://dx.doi.org/10.1029/2012EO160003 
Opening Reproducible Research project description and website: https://www.uni-muenster.de/forschungaz/project/9520?lang=en}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2016/EGU2016-7396.pdf} } @InProceedings{Nuest2015b, Title = {{A GEO label for the Sensor Web }}, Author = {N{\"u}st, Daniel and Lush, Victoria}, author+an = {1=highlight}, Booktitle = {The 18th AGILE International Conference on Geographic Information Science, Short Papers}, Year = {2015}, Address = {Lisboa, Portugal}, Journal = {AGILE}, Owner = {Daniel}, Timestamp = {2015.06.30}, Url = {http://www.agile-online.org/Conference_Paper/cds/agile_2015/shortpapers/115/115_Paper_in_PDF.pdf} } @Conference{Nuest2015c, Title = {{ConnectinGEO Observations Inventory }}, Author = {N{\"u}st, Daniel and Mas\'{o} Pau, Joan and Nativi, Stefano}, author+an = {1=highlight}, Booktitle = {The 18th AGILE International Conference on Geographic Information Science, Posters}, Year = {2015}, Address = {Lisboa, Portugal}, Note = {poster session}, Journal = {AGILE}, Keywords = {poster}, Owner = {Daniel}, Timestamp = {2015.06.30} } @Conference{Nuest2012, Title = {{R in the Sensor Web}}, Author = {N{\"u}st, Daniel and Pebesma, Edzer}, author+an = {1=highlight}, Booktitle = {Sensing a Changing World Workshop}, Year = {2012}, Month = {5}, Abstract = {The sensor web enablement initiative designed services for sensor data handling, discovery and eventing, but did not cover the processing of it. Independent but at the same time the R community developed an interoperable platform for data analysis. 
We present novel technologies that connect the sensor web with R and vice versa, enabling the processing of sensor web data and dissemination of the results.}, Keywords = {R, sensor web, spatio-temporal, abstract}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {https://www.wageningenur.nl/upload_mm/5/7/e/4561f473-77d4-4b60-8947-9b0964145e04_Nust_etal.pdf} } @Conference{Nuest2014b, Title = {{5 Star Open Geoprocessing}}, Author = {N{\"u}st, Daniel and Pro{\ss}, Benjamin}, author+an = {1=highlight}, Booktitle = {AGILE \& EuroSDR workshop: Geoprocessing on the Web}, Year = {2014}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.5starprocessing.info/} } @Conference{Nuest2014, Title = {{JavaScript Client Libraries for the (Former) Long Tail of OGC Standards}}, Author = {N{\"u}st, Daniel and Rieke, Matthes and Breen, Paul}, author+an = {1=highlight}, Booktitle = {Presentations at FOSS4G-Europe, Bremen}, Year = {2014}, Month = {7}, Abstract = {More and more information technology is moving into cloud-based infrastructures for both data storage as well as user interfaces. With the advancement of browser technology, especially regarding Javascript engines, the user interfaces follow this move based on HTML5, also for mobile devices. One key advantage is clear: users always use the latest version and the environment is well controlled: an internet browser. General purpose libraries (e.g. jQuery), frameworks with user interfaces (e.g. Dojo Widgets) as well as web-application libs (e.g. AngularJS) facilitate the development of complex applications. In the geospatial domain, such frameworks and libraries are successfully used to build complex applications and are often based on OpenLayers (OL) or Leaflet for mapping and visualisation. 
These libs support display of geospatial data coming from standardized view and feature services, most importantly the Open Geospatial Consortium's (OGC) Web Map Service (WMS) and Web Features Service (WFS). Both server and client libraries are mature and have reached a very stable level and wide distribution. What is missing today are generic libraries that operate at the same level of performance and quality to (i) access observation and time series data, for example coming from OGC Sensor Observation Services (SOS) as part of the Sensor Web Enablement (SWE) suite of standards, and (ii) control processes published online, for examples as an OGC Web Processing Service (WPS). These standards are less widespread than WMS and WFS but gain momentum as data volumes increase, for example with a myriad of smart sensors in the internet of things or new EO satellite missions, and subsequent requirements for sophisticated architectures for (event-based) processing and management of time series data. SWE standards have just reached their second versions; a new WPS standard is currently under development. Observing these developments lead to the birth of two new open source Javascript library projects that are presented in this talk. SOS.js is a Javascript framework to access SOS data and build sophisticated lightweight browser applications for discovering and displaying time series data as plots, tables, and maps. It consists of two modules: core and user interface. wps-js is a Javascript client library for the WPS generating forms based on the standardized metadata from the service and interactively creating and submitting processing tasks. It uses a templating mechanism for XML building and an internal Javascript class hierarchy. Both libraries are based on OL's request and response encoding. 
During the talk we will demonstrate sample applications build with the libraries and share experiences of developing client libraries for XML-based standardized web services with Javascript, which include programming as well as project build and management lessons. One goal for both libraries is to become independent of OL and provide service access with a minimal footprint, for example to display data without maps. Might OL and Leaflet eventually use these libraries instead of their own client implementations for SOS and WPS? We see an advantage of developing such small and focussed libraries maintained by field experts in these non-mainstream domains. We will happily discuss if this is the best approach and pose the following question: Is there a (technical, organisational) way to build a compatible Javascript client frameworks across all geo-service standards? We conclude that Javascript is ready to handle raw (timeseries) data and it is used more than ever. Also, both the standards and their open source implementations are ready for operational deployments. So it is now time to spread them further by increasing the usability with good browser client applications based on small and flexible open source libraries. 
While the presented libraries are developed within the 52\degree North communities we want to use this talk to actively reach out to members of other open source projects to seek collaborators and to organise interoperability tests to make these tools useful for a broader community.}, Keywords = {presentation}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://www.slideshare.net/nuest/javascript-client-libraries-for-the-former-long-tail-of-ogc-standards} } @InProceedings{Nust2011, Title = {{Connecting R to the Sensor Web}}, Author = {N{\"u}st, Daniel and Stasch, Christoph and Pebesma, Edzer J.}, author+an = {1=highlight}, Booktitle = {Advancing Geoinformation Science for a Changing World}, Year = {2011}, Editor = {Stan Geertman and Wolfgang Reinhardt and Fred Toppen}, Number = {3}, Pages = {227--246}, Publisher = {Springer}, Series = {Lecture Notes in Geoinformation and Cartography}, Volume = {1}, Abstract = {Interoperable data exchange and reproducibility are increasingly important for modern scientific research. This paper shows how three open source projects work together to realize this: (i) the R project, providing the lingua franca for statistical analysis, (ii) the Open Geospatial Consortium's Sensor Observation Service (SOS), a standardized data warehouse service for storing and retrieving sensor measurements, and (iii) sos4R, a new project that connects the former two. We show how sos4R can bridge the gap between two communities in science: spatial statistical analysis and visualization on one side, and the Sensor Web community on the other. sos4R enables R users to integrate (near real-time) sensor observations directly into R. Finally, we evaluate the functionality of sos4R. The software encapsulates the service's complexity with typical R function calls in a common analysis workflow, but still gives users full flexibility to handle interoperability issues. 
We conclude that it is able to close the gap between R and the sensor web.}, Doi = {10.1007/978-3-642-19789-5_12}, Owner = {Daniel}, Timestamp = {2011.03.07}, Url = {http://link.springer.com/chapter/10.1007%2F978-3-642-19789-5_12} } @Conference{Nuest2015a, Title = {Open and reproducible global land use classification}, Author = {N{\"u}st, Daniel and V{\'{a}}clav{\'{i}}k, Tom{\'{a}}{\v{s}} and Pross, Benjamin}, author+an = {1=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {9125}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {Researchers led by the Helmholtz Centre for Environmental Research (UFZ) developed a new world map of land use systems based on over 30 diverse indicators (http://geoportal.glues.geo.tu-dresden.de/stories/landsystemarchetypes.html) of land use intensity, climate and environmental and socioeconomic factors. They identified twelve land system archetypes (LSA) using a data-driven classification algorithm (self-organizing maps) to assess global impacts of land use on the environment, and found unexpected similarities across global regions. We present how the algorithm behind this analysis can be published as an executable web process using 52$^{\circ}$North WPS4R (https://wiki.52north.org/bin/view/Geostatistics/WPS4R) within the GLUES project (http://modul-a.nachhaltiges-landmanagement.de/en/scientific-coordination-glues/). WPS4R is an open source collaboration platform for researchers, analysts and software developers to publish R scripts (http://www.r-project.org/) as a geo-enabled OGC Web Processing Service (WPS) process. The interoperable interface to call the geoprocess allows both reproducibility of the analysis and integration of user data without knowledge about web services or classification algorithms. The open platform allows everybody to replicate the analysis in their own environments. 
The LSA WPS process has several input parameters, which can be changed via a simple web interface. The input parameters are used to configure both the WPS environment and the LSA algorithm itself. The encapsulation as a web process allows integration of non-public datasets, while at the same time the publication requires a well-defined documentation of the analysis. We demonstrate this platform specifically to domain scientists and show how reproducibility and open source publication of analyses can be enhanced. We also discuss future extensions of the reproducible land use classification, such as the possibility for users to enter their own areas of interest to the system and generate summary statistics relating the particular area to the land system archetype. Such an extension demonstrates the advantages of open geoprocesses, because the user does not need to replicate the whole workflow, which included considerable data preparation steps, and can still access an analysis result tailored to his needs. The LSAs are the basis for science-based policy recommendations for sustainable land management and yield improvement at a global scale. 
The reproducibility of the study strengthens the scientific work and the open source platform allows scientists to adapt and extend it to provide new original contributions to sustainable land use management.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-9125.pdf} } @Book{2015:52north:pubopen, Title = {Publishing Research Software as Open Source on GitHub}, Author = {N{\"u}st, Daniel and Jirka, Simon and Hitchcock, Ann}, author+an = {1=highlight}, Publisher = {52${}^\circ$North}, Year = {2015}, Month = {6}, HowPublished = {online}, Url = {https://www.gitbook.com/book/52north/pubopen} } @Article{pebesma2012r, Title = {The R software environment in reproducible geoscientific research}, Author = {Pebesma, Edzer and N{\"u}st, Daniel and Bivand, Roger}, author+an = {2=highlight}, Journal = {Eos, Transactions American Geophysical Union}, Year = {2012}, Number = {16}, Pages = {163--164}, Volume = {93}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://ifgi.uni-muenster.de/~epebe_01/eos.pdf} } @Conference{read2014, Title = {{A suite of R packages for web-enabled modeling and analysis of surface waters}}, Author = {Read, Jordan Stuart and Winslow, Luke A. and N{\"u}st, Daniel and De Cicco, Laura and Walker, Jordan I}, author+an = {3=highlight}, Booktitle = {American Geophysical Union Fall Meeting, San Francisco, California}, Year = {2014}, Organization = {American Geophysical Union}, Series = {Session H44D: Open-Source Tools and Software Development for the Hydrological Sciences I}, Abstract = {Researchers often create redundant methods for downloading, manipulating, and analyzing data from online resources. Moreover, the reproducibility of science can be hampered by complicated and voluminous data, lack of time for documentation and long-term maintenance of software, and fear of exposing programming skills. 
The combination of these factors can encourage unshared one-off programmatic solutions instead of openly provided reusable methods. Federal and academic researchers in the water resources and informatics domains have collaborated to address these issues. The result of this collaboration is a suite of modular R packages that can be used independently or as elements in reproducible analytical workflows. These documented and freely available R packages were designed to fill basic needs for the effective use of water data: the retrieval of time-series and spatial data from web resources (dataRetrieval, geoknife), performing quality assurance and quality control checks of these data with robust statistical methods (sensorQC), the creation of useful data derivatives (including physically- and biologically-relevant indices; GDopp, LakeMetabolizer), and the execution and evaluation of models (glmtools, rLakeAnalyzer). Here, we share details and recommendations for the collaborative coding process, and highlight the benefits of an open-source tool development pattern with a popular programming language in the water resources discipline (such as R). We provide examples of reproducible science driven by large volumes of web-available data using these tools, explore benefits of accessing packages as standardized web processing services (WPS) and present a working platform that allows domain experts to publish scientific algorithms in a service-oriented architecture (WPS4R). 
We assert that in the era of open data, tools that leverage these data should also be freely shared, transparent, and developed in an open innovation environment.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://adsabs.harvard.edu/abs/2014AGUFM.H44D..02R} } @Conference{StaschEGU2016, Title = {Sharing environmental models: An Approach using GitHub repositories and Web Processing Services}, Author = {Stasch, Christoph and N{\"u}st, Daniel and Pross, Benjamin}, author+an = {2=highlight}, Booktitle = {EGU Geophysical Research Abstracts}, Year = {2016}, Organization = {European Geophysical Union}, Pages = {6574}, Series = {Geophysical Research Abstracts}, Volume = {18}, Abstract = {The GLUES (Global Assessment of Land Use Dynamics, Greenhouse Gas Emissions and Ecosystem Services) project established a spatial data infrastructure for scientific geospatial data and metadata (http://geoportal-glues.ufz.de), where different regional collaborative projects researching the impacts of climate and socio-economic changes on sustainable land management can share their underlying base scenarios and datasets. One goal of the project is to ease the sharing of computational models between institutions and to make them easily executable in Web-based infrastructures. In this work, we present such an approach for sharing computational models relying on GitHub repositories (http://github.com) and Web Processing Services. At first, model providers upload their model implementations to GitHub repositories in order to share them with others. The GitHub platform allows users to submit changes to the model code. The changes can be discussed and reviewed before merging them. However, while GitHub allows sharing and collaborating of model source code, it does not actually allow running these models, which requires efforts to transfer the implementation to a model execution framework. 
We thus have extended an existing implementation of the OGC Web Processing Service standard (http://www.opengeospatial.org/standards/wps), the 52°North Web Processing Service (http://52north.org/wps) platform to retrieve all model implementations from a git (http://git-scm.com) repository and add them to the collection of published geoprocesses. The current implementation is restricted to models implemented as R scripts using WPS4R annotations (Hinz et al.) and to Java algorithms using the 52°North WPS Java API. The models hence become executable through a standardized Web API by multiple clients such as desktop or browser GIS and modelling frameworks. If the model code is changed on the GitHub platform, the changes are retrieved by the service and the processes will be updated accordingly. The admin tool of the 52°North WPS was extended to support automated retrieval and deployment of computational models from GitHub repositories. Once the R code is available in the GitHub repo, the contained process can be easily deployed and executed by simply defining the GitHub repository URL in the WPS admin tool. We illustrate the usage of the approach by sharing and running a model for land use system archetypes developed by the Helmholtz Centre for Environmental Research (UFZ, see Vaclavik et al.). The original R code was extended and published in the 52°North WPS using both, public and non-public datasets (Nüst et al., see also https://github.com/52North/glues-wps). Hosting the analysis in a Git repository now allows WPS administrators, client developers, and modelers to easily work together on new versions or completely new web processes using the powerful GitHub collaboration platform. References: Hinz, M. et. al. (2013): Spatial Statistics on the Geospatial Web. In: The 16th AGILE International Conference on Geographic Information Science, Short Papers. http://www.agile-online.org/Conference_Paper/CDs/agile_2013/Short_Papers/SP_S3.1_Hinz.pdf Nüst, D. et. 
al.: (2015): Open and reproducible global land use classification. In: EGU General Assembly Conference Abstracts. Vol 17. European Geophysical Union, 2015, p. 9125, http://meetingorganizer.copernicus.org/EGU2015/EGU2015-9125.pdf Vaclavik, T., et. al. (2013): Mapping global land system archetypes. Global Environmental Change 23(6): 1637-1647. Online available: October 9, 2013, DOI: 10.1016/j.gloenvcha.2013.09.004}, Comment = {Slides: http://presentations.copernicus.org/EGU2016-6574_presentation.pdf}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2016/EGU2016-6574.pdf} } @Conference{Stasch2014, Title = {{enviroCar}---Open car data and open analysis tools for sustainable transportation development}, Author = {Stasch, Christoph and N{\"u}st, Daniel and Rieke, Matthes and Remke, Albert and Pebesma, Edzer}, author+an = {2=highlight}, Booktitle = {The international conferences ICT4S - ICT for Sustainability}, Year = {2014}, Month = aug, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://2014.ict4s.org/files/2014/08/23-Envirocar.pdf} } @Conference{Stasch2015, Title = {{enviroCar}---citizen science for sustainable traffic}, Author = {Stasch, Christoph and Remke, Albert and Jirka, Simon and N{\"u}st, Daniel}, author+an = {4=highlight}, Booktitle = {EGU General Assembly Conference Abstracts}, Year = {2015}, Organization = {European Geophysical Union}, Pages = {4429}, Series = {Geophysical Research Abstracts}, Volume = {17}, Abstract = {Optimizing traffic flow is a challenging task, affecting both the mobility of people and the environment. Up to now, traffic monitoring is based on small samples using GPS devices or remote sensors such as cameras. Citizens are usually not actively involved in the process of collecting or analyzing traffic data. 
The enviroCar project (www.envirocar.org) aims at addressing this situation by providing an open platform that can be used by everyone to collect and analyze traffic-related data and thus to achieve sustainable traffic management by answering questions such as: How is the average speed on a certain route? Where are exceptionally long waiting times in front of traffic lights? At which crossings do more cars stop than drive through? Where are hotspots of fuel consumption and air pollutant emission during a certain time interval? In this presentation, an overview on the enviroCar project is given and current research challenges addressed in the context of the project are presented. Citizens are able to participate by registering at the enviroCar portal and downloading the enviroCar Android app. Once installed, the Android app allows citizens to collect car sensor data, e.g. speed, mass air flow, or intake temperature via an On-Board Diagnosis 2 (OBD-II) Adapter. After finishing a car ride, the data can be uploaded to the central enviroCar server where the data is anonymized and published as open data. Each enviroCar member has a profile page giving control on his own data and providing statistics on personal driving behavior. The portal also allows comparing personal statistics with the statistics of other members. It thus facilitates analysis whether, for example, a member is driving in a more fuel saving manner than other users. Besides only acting as a data collector, citizens can also explore the enviroCar data in online maps or download the data in standard formats for certain spatial areas and/or time intervals allowing them to conduct spatio-temporal analyses by themselves. Thus, the platform also provides a means to analyze issues, such as repeated stops at a particular traffic light, and to communicate the results to other stakeholders, e.g. traffic planners or politicians. 
For traffic planners, the enviroCar project can also serve as a valuable additional data source for evaluating certain decisions, e.g. changing traffic light sequences. As not only the pure GPS data but also the car sensor data is collected, enviroCar enables to directly relate the traffic data to environmental parameters such as air pollutant emissions and thus to identify, for example, hotspots of CO$_2$ emissions in a street network. Current research activities comprise technical issues, such as implementing scalable solutions for visualizing and analyzing big data sets, on improving estimation methods for fuel consumption and air pollutant emissions, but also include the development of novel spatio-temporal analysis and visualization methods and novel incentives for participation in crowd-sourcing and analyzing geospatial information.}, Keywords = {abstract}, Owner = {Daniel}, Timestamp = {2015.06.01}, Url = {http://meetingorganizer.copernicus.org/EGU2015/EGU2015-4429.pdf} } @Article{Wytzsik2013, Title = {{Echtzeitinformation und Kollaboration in Geodateninfrastrukturen}}, Author = {Wytzsik, Andreas and Schmidt, Benno and N{\"u}st, Daniel}, author+an = {3=highlight}, Journal = {zfv (Zeitschrift f{\"u}r Geod{\"a}sie, Geoinformation und Landmanagement)}, Year = {2013}, Number = {5/2013}, Abstract = {Summary: Besides a number of initiatives focussing on the implementation of spatial data infrastructures on the various governmental levels, significant efforts are being made in order to implement the INSPIRE Directive 2007/2/EC. However, SDI development cannot keep up with the pace other information infrastructures are developing. Conceptual advancements of the SDI approach as well as technical innovations are rarely found. While highlighting selected developments in mainstream-IT, this article tries to identify potential candidates for (technical) advancements, which reflect the changing role of a user community that is increasingly part of the social web. 
The emphasis lies on the use of spatial data infrastructures for collaborative processes, for providing near real-time information, and the adaption of mainstream information technology trends. Zusammenfassung: Trotz beachtlicher Erfolge bei der Umsetzung der europäischen INSPIRE-Rahmenrichtlinie sowie vielfältiger Initiativen zum Auf- und Ausbau öffentlicher Geodateninfrastrukturen kann das Tempo der GDI-Entwicklung nicht mit dem anderer Informationsinfrastrukturen Schritt halten. Konzeptionelle Weiterentwicklungen des GDI-Ansatzes sowie technische Neuerungen finden sich nur selten. Der Artikel beleuchtet vor dem Hintergrund ausgewählter informationstechnologischer Entwicklungen Potenziale zur (technischen) Weiterentwicklung. Dabei stehen Nutzungsformen im Vordergrund, die aus der veränderten Rolle einer zunehmend durch soziale Medien geprägten Nutzergemeinde resultieren. Besonderes Augenmerk liegt hierbei auf der Nutzung von Geodateninfrastrukturen für kollaborative Prozesse, der Bereitstellung von Naheechtzeitinformation und der Adaption von Trends der Mainstream-Informationstechnologie.}, Owner = {Daniel}, Timestamp = {2014.10.11}, Url = {http://geodaesie.info/zfv/heftbeitrag/1945} } @article{Nuest_2018, doi = {10.7717/peerj.5072}, url = {https://doi.org/10.7717/peerj.5072}, year = 2018, month = jul, publisher = {{PeerJ}}, volume = {6}, pages = {e5072}, author = {N{\"u}st, Daniel and Granell, Carlos and Hofer, Barbara and Konkol, Markus and Ostermann, Frank O. and Sileryte, Rusne and Cerutti, Valentina}, author+an = {1=highlight}, title = {Reproducible research and {GIScience}: an evaluation using {AGILE} conference papers}, journal = {{PeerJ}} } @comment{jabref-meta: selector_keywords:Sensor Plug & Play;Sensor Web Enablement;} @comment{jabref-meta: selector_journal:} @comment{jabref-meta: selector_publisher:} @comment{jabref-meta: selector_author:}