@article{Hevner2007,
  abstract = {As a commentary to Juhani Iivari's insightful essay, I briefly analyze design science research as an embodiment of three closely related cycles of activities. The Relevance Cycle inputs requirements from the contextual environment into the research and introduces the research artifacts into environmental field testing. The Rigor Cycle provides grounding theories and methods along with domain experience and expertise from the foundations knowledge base into the research and adds the new knowledge generated by the research to the growing knowledge base. The central Design Cycle supports a tighter loop of research activity for the construction and evaluation of design artifacts and processes. The recognition of these three cycles in a research project clearly positions and differentiates design science from other research paradigms. The commentary concludes with a claim to the pragmatic nature},
  author = {Hevner, Alan R.},
  issn = {0905-0167},
  journal = {Scandinavian Journal of Information Systems},
  keywords = {design cycle,design science,relevance cycle,rigor cycle},
  number = {2},
  pages = {87--92},
  title = {{A Three Cycle View of Design Science Research}},
  volume = {19},
  year = {2007}
}

@article{Hevner2004,
  abstract = {Two paradigms characterize much of the research in the Information Systems discipline: behavioral science and design science. The behavioral science paradigm seeks to develop and verify theories that explain or predict human or organizational behavior. The design-science paradigm seeks to extend the boundaries of human and organizational capabilities by creating new and innovative artifacts. Both paradigms are foundational to the IS discipline, positioned as it is at the confluence of people, organizations, and technology. Our objective is to describe the performance of design-science research in Information Systems via a concise conceptual framework and clear guidelines for understanding, executing, and evaluating the research. In the design-science paradigm, knowledge and understanding of a problem domain and its solution are achieved in the building and application of the designed artifact. Three recent exemplars in the research literature are used to demonstrate the application of these guidelines. We conclude with an analysis of the challenges of performing high-quality design-science research in the context of the broader IS community.},
  author = {Hevner, Alan R. and March, Salvatore T. and Park, Jinsoo and Ram, Sudha},
  doi = {10.2307/25148625},
  issn = {0276-7783},
  journal = {MIS Quarterly},
  keywords = {Information Systems research methodologies,business environment,creativity,design artifact,design science,experimental methods,search strategies,technology infrastructure},
  number = {1},
  pages = {75--105},
  title = {{Design Science in Information Systems Research}},
  volume = {28},
  year = {2004}
}

@book{Johannesson2014,
  abstract = {This book is an introductory text on design science, intended to support both graduate students and researchers in structuring, undertaking and presenting design science work. It builds on established design science methods as well as recent work on presenting design science studies and ethical principles for design science, and also offers novel instruments for visualizing the results, both in the form of process diagrams and through a canvas format. This work focuses on design science as applied to information systems and technology, but it also includes examples from, and perspectives of, other fields of human practice.},
  address = {Cham},
  author = {Johannesson, Paul and Perjons, Erik},
  doi = {10.1007/978-3-319-10632-8},
  isbn = {978-3-319-10631-1},
  pages = {197},
  publisher = {Springer International Publishing},
  title = {{An Introduction to Design Science}},
  year = {2014}
}

@article{Peffers2007,
  author = {Peffers, Ken and Tuunanen, Tuure and Rothenberger, Marcus A. and Chatterjee, Samir},
  doi = {10.2753/MIS0742-1222240302},
  issn = {0742-1222},
  journal = {Journal of Management Information Systems},
  month = {dec},
  number = {3},
  pages = {45--77},
  title = {{A Design Science Research Methodology for Information Systems Research}},
  volume = {24},
  year = {2007}
}

@incollection{Sonnenberg2012,
  abstract = {The central outcome of design science research (DSR) is prescriptive knowledge in the form of IT artifacts and recommendations. However, prescriptive knowledge is considered to have no truth value in itself. Given this assumption, the validity of DSR outcomes can only be assessed by means of descriptive knowledge to be obtained at the conclusion of a DSR process. This is reflected in the build-evaluate pattern of current DSR methodologies. Recognizing the emergent nature of IT artifacts, this build-evaluate pattern, however, poses unfavorable implications regarding the achievement of rigor within a DSR project. While it is vital in DSR to prove the usefulness of an artifact, a rigorous DSR process also requires justifying and validating the artifact design itself even before it has been put into use. This paper proposes three principles for evaluating DSR artifacts which not only address the evaluation of an artifact's usefulness but also the evaluation of design decisions made to build an artifact. In particular, it is argued that by following these principles the prescriptive knowledge produced in DSR can be considered to have a truth-like value.},
  author = {Sonnenberg, Christian and vom Brocke, Jan},
  booktitle = {Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)},
  doi = {10.1007/978-3-642-29863-9_28},
  isbn = {978-3-642-29862-2},
  issn = {0302-9743},
  keywords = {Design science research,design theory,epistemology,evaluation},
  pages = {381--397},
  title = {{Evaluations in the Science of the Artificial -- Reconsidering the Build-Evaluate Pattern in Design Science Research}},
  volume = {7286 LNCS},
  year = {2012}
}

@article{Venable2016,
  abstract = {Evaluation is a central and essential activity in conducting rigorous Design Science Research (DSR), yet there is surprisingly little guidance about designing the DSR evaluation activity beyond suggesting possible methods that could be used for evaluation. This paper extends the notable exception of the existing framework of Pries-Heje et al [11] to address this problem. The paper proposes an extended DSR evaluation framework together with a DSR evaluation design method that can guide DSR researchers in choosing an appropriate strategy for evaluation of the design artifacts and design theories that form the output from DSR. The extended DSR evaluation framework asks the DSR researcher to consider (as input to the choice of the DSR evaluation strategy) contextual factors of goals, conditions, and constraints on the DSR evaluation, e.g. the type and level of desired rigor, the type of artifact, the need to support formative development of the designed artifacts, the properties of the artifact to be evaluated, and the constraints on resources available, such as time, labor, facilities, expertise, and access to research subjects. The framework and method support matching these in the first instance to one or more DSR evaluation strategies, including the choice of ex ante (prior to artifact construction) versus ex post evaluation (after artifact construction) and naturalistic (e.g., field setting) versus artificial evaluation (e.g., laboratory setting). Based on the recommended evaluation strategy(ies), guidance is provided concerning what methodologies might be appropriate within the chosen strategy(ies).},
  author = {Venable, John and Pries-Heje, Jan and Baskerville, Richard},
  doi = {10.1057/ejis.2014.36},
  issn = {0960-085X},
  journal = {European Journal of Information Systems},
  keywords = {design science research,evaluation method,evaluation strategy,information systems evaluation,research methodology},
  month = {jan},
  number = {1},
  pages = {77--89},
  title = {{FEDS: A Framework for Evaluation in Design Science Research}},
  volume = {25},
  year = {2016}
}

@book{Wieringa2014,
  abstract = {Design scientists have to balance the demands of methodological rigor that they share with purely curiosity-driven scientists, with the demands of practical utility that they share with utility-driven engineers. Balancing these conflicting demands can be ...},
  address = {Berlin, Heidelberg},
  author = {Wieringa, Roel J.},
  doi = {10.1007/978-3-662-43839-8},
  isbn = {978-3-662-43838-1},
  keywords = {Design Science},
  pages = {493},
  publisher = {Springer Berlin Heidelberg},
  title = {{Design Science Methodology for Information Systems and Software Engineering}},
  year = {2014}
}