#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ BotEngine Dependencies: * Python 3.8+ * requests - use "pip3 install requests" to install * dateutil - use "pip3 install python-dateutil" to install * pytz - use "pip3 install pytz" to install * dill - use "pip3 install dill" to install * lz4 - use "pip3 install lz4" to install * colorama - use "pip3 install colorama" to install * ijson - use "pip3 install ijson" to install pip install -r requirements.txt @author: David Moss, Destry Teeter @copyright: 2012 - 2025 People Power Company. All rights reserved. @contact: dmoss@caredaily.ai, destry@caredaily.ai """ import copy import datetime import pytz import importlib import json import logging import os import sys import time import urllib.parse import zipfile from argparse import ArgumentParser, RawDescriptionHelpFormatter from pathlib import Path # Our default server address DEFAULT_BASE_SERVER_URL = "app.peoplepowerco.com" __all__ = [] __version__ = "9.6.4" __date__ = "2025-06-13" DEBUG = 0 TESTRUN = 0 PROFILE = 0 BOT_CATEGORY_DICT = { "E": "Energy", "S": "Security", "C": "Care", "L": "Lifestyle", "H": "Health", "W": "Wellness", } VERSION_STATUS_DICT = { 0: "Waiting for upload", 1: "Privately Available", 2: "Submitted for Review", 3: "Under Review", 4: "Publicly Available", 5: "Bot Shop Rejected", 6: "Developer Rejected", 7: "Replaced by a newer version", 8: "Suspended", } LOGGING_LEVEL_DICT = { "debug": logging.DEBUG, "info": logging.INFO, "warn": logging.WARN, "error": logging.ERROR, } # Bot types for key retrieval BOT_KEY_TYPE_NORMAL = 0 BOT_KEY_TYPE_DEVELOPER = 1 # Bot Status Enum STATUS_BOT_INCOMPLETE = 0 STATUS_BOT_ACTIVE = 1 STATUS_BOT_INACTIVE = 2 # global variables _bot_loggers = None _bot_logger_config = None _https_proxy = None # Runtime classifiers for AWS Lambda RUNTIME_PYTHON_3_8 = 2 RUNTIME_PYTHON_3_9 = 3 RUNTIME_PYTHON_3_10 = 4 RUNTIME_PYTHON_3_11 = 5 RUNTIME_PYTHON_3_12 = 6 RUNTIME_PYTHON_3_13 = 7 # Default runtime for bots DEFAULT_RUNTIME_PYTHON = RUNTIME_PYTHON_3_12 # True to not delete the .tar file generated on upload; False to delete the .tar file (default) KEEP_BOT_TAR_FILE = False # WARNING - changing this maximum size could cause all applications that previously used a different size to blow their brains out. # The maximum variable size accepted by the server is 16,000,000 bytes # There's a maximum limit to a single HTTP POST # The maximum HTTP post size can be up to 2x as large as this maximum size # 8MB was selected to be quite safe, and result in a maximum 16MB HTTP POST which is two variables MAXIMUM_VARIABLE_SIZE_BYTES = 8000000 # Maximum integer size, declared because Python 2.7 has a max int concept but Python 3.x does not and we want to remain forward-compatible. MAXINT = 9223372036854775807 # Name of our core variable CORE_VARIABLE_NAME = "-core-" # Name of our internal variable to store Timer information TIMERS_VARIABLE_NAME = "[t]" # Minimum gap between timers TIMER_MIN_MS = 10 * 1000 # 10 seconds in milliseconds # Name of our internal variable to store Question/Answer information QUESTIONS_VARIABLE_NAME = "[q]" # Name of our internal variable to store the trigger count when running on the server COUNT_VARIABLE_NAME = "[c]" # For debugging variables: When variables are flushed to the server, also save them to a local file. 
SAVE_VARIABLES_TO_DEBUG_FILE = False

# Keys for state variable properties in cache
STATE_KEY_CONTENT = "c"
STATE_KEY_PUBLISH = "p"
STATE_KEY_UPDATE_LIST = "u"
STATE_KEY_DELETE_LIST = "d"
STATE_KEY_OVERWRITE = "o"

# List of edge-computing capable gateway device types
EDGE_CAPABLE_GATEWAY_DEVICE_TYPES = [32]

# .tar or .zip the final bot package
TAR = True

# Default configurable bot runtime on the server in seconds (the server's default is 1.0 second)
DEFAULT_RUNTIME_TIMEOUT_S = 510  # 8.5 minutes

# Default configurable bot start key timeout on the server in seconds
DEFAULT_START_KEY_TIMEOUT_S = 3

# Memory in MB required for the bot.
# Default is 128 MB (minimum value).
# Increments must be multiples of 64 MB.
# Maximum value is 3008 MB.
# AWS Lambda uses this memory size in pricing calculations
DEFAULT_MEMORY = 128

# Default start analytic request retry limit
DEFAULT_RUNTIME_TIMEOUT_RETRY_LIMIT = 5

# When downloading data that may contain commas, this character will replace those commas
COMMA_DELIMITER_REPLACEMENT_CHARACTER = "&&"

# These keys are used in microservices index files to identify device, location, and organization microservices.
# They're used to aggregate multiple microservices index files into a unified JSON structure.
DEVICE_MICROSERVICES_KEY = "DEVICE_MICROSERVICES"
LOCATION_MICROSERVICES_KEY = "LOCATION_MICROSERVICES"
DATA_FILTERS_KEY = "DATA_FILTER_MICROSERVICES"
ORGANIZATION_MICROSERVICES_KEY = "ORGANIZATION_MICROSERVICES"

# The index.py file lists all the microservices for the current bot and is imported by bot files
MICROSERVICES_INDEX_FILENAME = "index.py"

# The runtime.json file describes to the server what data sources and permissions this bot needs to access
RUNTIME_FILENAME = "runtime.json"

# The structure.json file describes the file/directory/microservices/dependencies of the current bot
STRUCTURE_FILENAME = "structure.json"

# The marketing filename describes how to present this bot and its capabilities in a future bot shop
MARKETING_FILENAME = "marketing.json"

# Information file generated during the creation of a bot, including the bundle ID
INFO_FILENAME = "info.py"

# The topics.json file describes the message topics that this bot can send to users
TOPICS_FILENAME = "topics.json"


# ===============================================================================
# Main Function
# ===============================================================================
def main(argv=None):
    """
    CLI botengine main entry point
    """
    if argv is None:
        argv = sys.argv
    else:
        sys.argv.extend(argv)

    try:
        importlib.import_module("requests")
    except ImportError:
        sys.stderr.write("Missing the 'requests' module!\n")
        sys.stderr.write(
            "Please install this module by running 'pip3 install requests'\n"
        )
        return 1

    try:
        importlib.import_module("dateutil")
    except ImportError:
        sys.stderr.write("Missing the 'python-dateutil' module!\n")
        sys.stderr.write(
            "Please install this module by running 'pip3 install python-dateutil'\n"
        )
        return 1

    try:
        importlib.import_module("dill")
    except ImportError:
        sys.stderr.write("Missing the 'dill' module!\n")
        sys.stderr.write("Please install this module by running 'pip3 install dill'\n")
        return 1

    try:
        importlib.import_module("colorama")
    except ImportError:
        sys.stderr.write("Missing the 'colorama' module!\n")
        sys.stderr.write(
            "Please install this module by running 'pip3 install colorama'\n"
        )
        return 1

    program_version = "v%s" % __version__
    program_build_date = str(__date__)
    program_version_message = "%%(prog)s %s (%s)" % (
        program_version,
        program_build_date,
    )
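    # NOTE: the module docstring also lists pytz, lz4, and ijson as dependencies,
    # but only requests, dateutil, dill, and colorama are verified above. A minimal
    # sketch of how the remaining checks could be consolidated (illustrative only,
    # not part of the original startup flow):
    #
    #     for module_name in ("pytz", "lz4", "ijson"):
    #         try:
    #             importlib.import_module(module_name)
    #         except ImportError:
    #             sys.stderr.write("Missing the '%s' module!\n" % module_name)
    #             sys.stderr.write("Please install this module by running 'pip3 install %s'\n" % module_name)
    #             return 1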
program_shortdesc = __import__("__main__").__doc__.split("\n")[1] program_license = """%s Created by David Moss Copyright 2025 People Power Company. All rights reserved. Distributed on an "AS IS" basis without warranties or conditions of any kind, either express or implied. USAGE """ % (program_shortdesc) # Cross-platform color compatibility import colorama colorama.init(autoreset=True) try: # Setup argument parser parser = ArgumentParser( description=program_license, formatter_class=RawDescriptionHelpFormatter, add_help=False, ) developer_group = parser.add_argument_group( Color.BOLD + "BOT LAB - Create and manage your own Bot services" + Color.END ) developer_group.add_argument( "--core", dest="core_directory", help="For repository structures that separate a shared core bot repository from a private proprietary repository, this is the absolute path to the core bot repository.", ) developer_group.add_argument( "--commit", dest="commit_bundle_id", help="Commit the given bot bundle to the server", ) developer_group.add_argument( "--my_developer_bots", dest="listapps", action="store_true", help="Get a list of the bots you created", ) developer_group.add_argument( "--publish", dest="publish_bundle_id", help="Submit this bot for review to become publicly available", ) developer_group.add_argument( "--makeitso", dest="make_it_so", help="Commit, publish, review, and approve - for senior admin-level bot developers only", ) developer_group.add_argument( "--approve", dest="approve_bundle_id", help="Used by administrators to approve a bot for publishing", ) developer_group.add_argument( "--botinfo", dest="info_bundle_id", nargs="+", help="Get the details of your given bot bundle", ) developer_group.add_argument( "--stats", dest="stats_bundle_id", nargs="+", help="Get the statistics of your given bot bundle", ) developer_group.add_argument( "--errors", dest="errors_bundle_id", nargs="+", help="Get the errors from your given bot bundle executing on the cloud across all users", ) developer_group.add_argument( "--logs", dest="logs_bundle_id", nargs="+", help="Get the logs from your given bot bundle executing on the cloud across all users", ) developer_group.add_argument( "--reject_bot_under_review", dest="reject_bundle_id", help="Reject the given bot from being reviewed or published publicly. Used after you --publish and need to make changes.", ) developer_group.add_argument( "--add_team_member", dest="add_team_member", action="store_true", help="Add a new member to the team.", ) developer_group.add_argument( "--delete_team_member", dest="delete_team_member", action="store_true", help="Delete a member of the team.", ) developer_group.add_argument( "--get_team_members", dest="get_team_members", action="store_true", help="View members of the team.", ) beta_group = parser.add_argument_group( Color.BOLD + "BOT BETA TESTING - Privately beta test your bots" + Color.END ) beta_group.add_argument( "--beta_test_bot", dest="beta_bundle_id", help="Specify a bot bundle ID to configure for beta testing. Typically used in conjunction with --beta_add_user and --beta_delete_user. 
If used alone, it returns a list of existing beta tester user IDs.",
        )
        beta_group.add_argument(
            "--beta_add_user",
            dest="beta_add_user_id",
            help="Specify a user ID to add as a beta tester for the given --beta_test_bot, or used with --beta_purchase_bot for an admin to purchase the bot into the given account.",
        )
        beta_group.add_argument(
            "--beta_delete_user",
            dest="beta_delete_user_id",
            help="Specify a user ID to remove as a beta tester for the given --beta_test_bot",
        )
        beta_group.add_argument(
            "--beta_purchase_bot",
            dest="beta_purchase_bot",
            help="As an admin, purchase the given bot bundle ID into a user account to begin running it. Use this in conjunction with --beta_add_user to specify the recipient user ID.",
        )

        run_group = parser.add_argument_group(
            Color.BOLD + "BOT ENGINE - Execute Bots" + Color.END
        )
        run_group.add_argument(
            "-r",
            "--run",
            dest="run_bundle_id",
            help="Run a bot. Pass in the bundle identifier of the bot, which must also be the name of the bot's directory below your current working directory",
        )
        run_group.add_argument(
            "-i",
            "--instance",
            dest="run_instance_id",
            help="Run the specific bot instance ID",
        )
        run_group.add_argument(
            "-j",
            "--json",
            dest="json",
            help='The JSON that would be passed to the bot over the command line, in the format \'{"hello": "world"}\'',
        )
        run_group.add_argument(
            "-a",
            "--apikey",
            dest="user_key",
            help="User's API key, instead of a username / password",
        )
        run_group.add_argument(
            "-l", "--location", dest="location_id", help="Location ID"
        )
        run_group.add_argument(
            "--servermode",
            dest="servermode",
            action="store_true",
            help="Run this bot in a server environment",
        )
        run_group.add_argument(
            "--https_proxy",
            dest="https_proxy",
            help="If your corporate network requires a proxy, type in the full HTTPS proxy address here (e.g. http://10.10.1.10:1080)",
        )

        appstore_group = parser.add_argument_group(
            Color.BOLD
            + "BOT SHOP - Browse the Bot Shop and manage purchased bots."
            + Color.END
        )
        appstore_group.add_argument(
            "--lookat",
            dest="view_bundle_id",
            help="View the details of a bot on the bot store",
        )
        appstore_group.add_argument(
            "--purchase",
            dest="purchase_bundle_id",
            help="Obtain or purchase access to a bot on the bot store",
        )
        appstore_group.add_argument(
            "--configure",
            dest="configure_bot_instance_id",
            help="Grant permission for a bot instance to access devices and communications",
        )
        appstore_group.add_argument(
            "--my_purchased_bots",
            dest="my_purchased_bots",
            action="store_true",
            help="Get a list of the bots you have obtained or purchased",
        )
        appstore_group.add_argument(
            "--pause",
            dest="pause_bot_instance_id",
            help="Stop the given bot instance from executing on your account",
        )
        appstore_group.add_argument(
            "--play",
            dest="play_bot_instance_id",
            help="Resume execution of the given bot instance on your account",
        )
        appstore_group.add_argument(
            "--delete",
            dest="delete_bot_instance_id",
            help="Delete the given bot instance ID or bundle ID out of my account",
        )
        appstore_group.add_argument(
            "--permissions",
            dest="permissions_bot_instance_id",
            help="Discover what your purchased bot has permission to access",
        )
        appstore_group.add_argument(
            "--questions",
            dest="questions_bot_instance_id",
            help="Answer questions asked by the given bot instance ID or bundle ID",
        )

        optional_group = parser.add_argument_group(
            Color.BOLD + "Optional Arguments" + Color.END
        )
        optional_group.add_argument(
            "-o",
            "--organization_id",
            dest="organization_id",
            help="Add in the organization ID we're talking about, used in conjunction with --purchase, --add_organization, --approve_organization, --remove_organization, and --get_organizations",
        )
        optional_group.add_argument(
            "-h",
            "--help",
            dest="help",
            action="store_true",
            help="Show this help message and exit",
        )
        optional_group.add_argument(
            "-u", "--username", dest="username", help="Username"
        )
        optional_group.add_argument(
            "-st",
            "--save-states",
            dest="save_states",
            action="store_true",
            help="Set states after playback locally",
        )
        optional_group.add_argument(
            "-fst",
            "--force-save-states",
            dest="force_save_states",
            action="store_true",
            help="Force-set states after playback locally",
        )
        optional_group.add_argument(
            "-sp",
            "--save-priorities",
            dest="save_priorities",
            action="store_true",
            help="Set location priority after playback locally",
        )
        optional_group.add_argument(
            "-sn",
            "--save-narratives",
            dest="save_narratives",
            action="store_true",
            help="Set location narratives after playback locally",
        )
        optional_group.add_argument(
            "-p", "--password", dest="password", help="Password"
        )
        optional_group.add_argument(
            "--admin_username", dest="admin_username", help="Administrative username"
        )
        optional_group.add_argument(
            "--admin_password", dest="admin_password", help="Administrative password"
        )
        optional_group.add_argument(
            "-s",
            "--server",
            dest="server",
            help="Base server URL (default is " + DEFAULT_BASE_SERVER_URL + ")",
        )
        optional_group.add_argument(
            "-c", "--challenge", dest="challenge_id", help="Challenge ID"
        )
        optional_group.add_argument(
            "--loglevel",
            dest="loglevel",
            choices=["debug", "info", "warn", "error"],
            default="info",
            help="The logging level, default is info",
        )
        optional_group.add_argument(
            "--httpdebug",
            dest="httpdebug",
            action="store_true",
            help="HTTP debug logger output",
        )
        optional_group.add_argument(
            "--logfile",
            dest="logfile",
            help="Append the debug output to the given filename",
        )
        optional_group.add_argument(
            "--zip",
            dest="zip",
            action="store_true",
            help="Commit the bot using the .zip (old) method of bot generation, instead of the .tar (new) method.",
        )

        tools_group = parser.add_argument_group(
            Color.BOLD + "Handy Developer Tools" + Color.END
        )
        tools_group.add_argument(
            "--my_devices",
            dest="list_devices",
            action="store_true",
            help="Get a list of your devices",
        )
        tools_group.add_argument(
            "--my_locations",
            dest="list_locations",
            action="store_true",
            help="Get a list of the locations your account has access to",
        )
        tools_group.add_argument(
            "--user_id", dest="user_id", action="store_true", help="Get your user ID"
        )
        tools_group.add_argument(
            "--device_types",
            dest="device_types",
            action="store_true",
            help="Get a list of available device types on this server",
        )
        tools_group.add_argument(
            "--parameter",
            dest="parameter",
            help="Get a description of a specific parameter name",
        )
        tools_group.add_argument(
            "--download_device",
            dest="download_device_id",
            help="Download data from a specific device ID in CSV format",
        )
        tools_group.add_argument(
            "--download_type",
            dest="download_device_type",
            help="Download data from all devices of a specific device type in CSV format",
        )

        # Can be used with the -o option!
        tools_group.add_argument(
            "--record",
            dest="record",
            action="store_true",
            help="Record all device and mode data from your account for rapid playback and bot testing",
        )
        tools_group.add_argument(
            "--playback",
            dest="playback",
            help="Specify a recorded .json or zip filename to playback. Use the --run command to specify the bot.",
        )
        tools_group.add_argument(
            "--playback_options",
            dest="playback_options",
            choices=["default", "merged"],
            default="default",
            help="The playback option for a zip file; 'default' plays back the whole data JSON file.",
        )
        tools_group.add_argument(
            "--playback_to_now",
            dest="playback_to_now",
            action="store_true",
            help="Add this argument to --playback a past recording all the way to the current time, even though the recording potentially concluded a long time ago.",
        )
        tools_group.add_argument(
            "--generate",
            dest="generate_bot_bundle_id",
            help="Generate the bot locally for analysis, without installing dependencies or uploading.",
        )
        tools_group.add_argument(
            "--user_key",
            dest="get_user_key",
            action="store_true",
            help="Log in and retrieve the user API key.",
        )
        tools_group.add_argument(
            "--admin_key",
            dest="get_admin_key",
            action="store_true",
            help="Log in and retrieve the admin API key.",
        )
        tools_group.add_argument(
            "--sync_edge",
            dest="sync_edge",
            action="store_true",
            help="Command any edge-computing capable gateways to immediately resynchronize their bots.",
        )
        # tools_group.add_argument("--download_media", dest="download_media", help="Download all videos and pictures from your account")

        settings_group = parser.add_argument_group(
            Color.BOLD + "Version Control" + Color.END
        )
        settings_group.add_argument(
            "--version", action="version", version=program_version_message
        )
        settings_group.add_argument(
            "--update",
            dest="update",
            action="store_true",
            help="Update this BotEngine framework from the server",
        )

        # Subparsers
        subparsers = parser.add_subparsers(dest="actions")

        # 'bot_organizations' parser
        bot_organizations_parser = subparsers.add_parser("bot_organizations")
        add_organization_group = bot_organizations_parser.add_argument_group(
            Color.BOLD + "Add Organization" + Color.END
        )
        add_organization_group.add_argument(
            "--add_organization",
            dest="add_bot_organization",
            help="Allow the given Bundle ID to be purchased by the given --organization",
        )
        approve_organization_group = bot_organizations_parser.add_argument_group(
            Color.BOLD + "Approve Organization" + Color.END
        )
        approve_organization_group.add_argument(
            "--approve_organization",
            dest="approve_bot_organization",
            help="Approve the given Bundle ID for the given --organization",
        )
        approve_organization_group.add_argument(
            "-s",
            "--status",
            dest="bot_organization_status",
            help="Status of the bot. 1 to approve, 2 to reject.",
        )
        approve_organization_group.add_argument(
            "-d",
            "--development",
            dest="bot_organization_development",
            action="store_true",
            help="This bot is for testing and development purposes. Default is 'false'.",
        )
        remove_organization_group = bot_organizations_parser.add_argument_group(
            Color.BOLD + "Remove Organization" + Color.END
        )
        remove_organization_group.add_argument(
            "--remove_organization",
            dest="remove_bot_organization",
            help="Bundle ID to prevent from being purchased by the --organization",
        )
        get_organizations_group = bot_organizations_parser.add_argument_group(
            Color.BOLD + "Get Organizations" + Color.END
        )
        get_organizations_group.add_argument(
            "--get_organizations",
            dest="get_bot_organizations",
            help="Get a list of the organizations you have access to",
        )

        # 'bot_shop_search' parser
        bot_shop_search_parser = subparsers.add_parser("bot_shop_search")
        bot_shop_search_parser.add_argument(
            "--search_by",
            dest="bot_shop_search_search_by",
            help="Search in name, author, keywords, bundle. Use * for a wildcard.",
        )
        bot_shop_search_parser.add_argument(
            "--categories",
            dest="bot_shop_search_categories",
            nargs="+",
            help="Category search, e.g. 'S', 'E', etc. Multiple values are allowed and OR-ed.",
        )
        bot_shop_search_parser.add_argument(
            "--compatible",
            dest="bot_shop_search_compatible",
            help="Filter by bots that are compatible with your user account or not, leave blank to return all bots",
        )
        bot_shop_search_parser.add_argument(
            "--lang",
            dest="bot_shop_search_lang",
            help="Language filter, leave blank to return bots in all languages",
        )
        bot_shop_search_parser.add_argument(
            "--core", dest="bot_shop_search_core", help="Filter by core class"
        )
        bot_shop_search_parser.add_argument(
            "--object_names",
            dest="bot_shop_search_object_names",
            nargs="+",
            help="Show objects with such name(s). Multiple values are allowed.",
        )
        bot_shop_search_parser.add_argument(
            "--limit", dest="bot_shop_search_limit", help="Limit the response size"
        )

        # 'distribution' parser
        distribution_parser = subparsers.add_parser("distribution")
        tagrelease_group = distribution_parser.add_argument_group(
            Color.BOLD + "Tag Release" + Color.END
        )
        tagrelease_group.add_argument(
            "--tag_release",
            dest="tag_release",
            action="store_true",
            help="Tag and push a new BotEngine release",
        )
        tagrelease_group.add_argument(
            "-v", "--version", dest="new_version", help="New version for this release"
        )

        # 'execution_history' subparser
        execution_parser = subparsers.add_parser("execution")
        execution_parser.add_argument(
            "-i",
            "--instance_id",
            dest="app_instance_id",
            help="Filter by bot instance ID.",
        )
        execution_parser.add_argument(
            "-f",
            "--execution_flow",
            dest="execution_flow",
            help="Filter by the execution flow.",
        )
        get_execution_history_group = execution_parser.add_argument_group(
            Color.BOLD + "Get Execution History" + Color.END
        )
        get_execution_history_group.add_argument(
            "--execution_history",
            dest="execution_history",
            action="store_true",
            help="Get the execution history of your given bot bundle",
        )
        get_execution_history_group.add_argument(
            "-b",
            "--bundle",
            dest="bundle",
            help="Bot bundle ID. The parameter is required if the bot instance ID is not specified.",
        )
        get_execution_history_group.add_argument(
            "-d",
            "--developer",
            dest="developer",
            help="'True' means the developer version, 'false' (default) means the public version. The parameter is ignored if the bot instance ID is specified.",
        )
        get_execution_history_group.add_argument(
            "-t", "--trigger", dest="trigger", help="Filter by trigger."
        )
        get_execution_history_group.add_argument(
            "-e",
            "--errors_only",
            dest="errors_only",
            help="If true then only failed executions are returned.",
        )
        get_execution_history_group.add_argument(
            "--start_date_ms",
            dest="start_date_ms",
            help="Execution history start date.",
        )
        get_execution_history_group.add_argument(
            "--end_date_ms", dest="end_date_ms", help="Execution history end date."
        )
        get_execution_history_group.add_argument(
            "--row_count",
            dest="row_count",
            help="The maximum number of records to display (default is 200).",
        )
        get_execution_history_group.add_argument(
            "--sort_order",
            dest="sort_order",
            help="asc (oldest executions first) or desc (newest executions first).",
        )
        get_execution_info_group = execution_parser.add_argument_group(
            Color.BOLD + "Get Execution Info" + Color.END
        )
        get_execution_info_group.add_argument(
            "--get_execution_info",
            dest="get_execution_info",
            action="store_true",
            help="Information about a specific execution of a particular bot instance, including the log data.",
        )
        get_execution_info_group.add_argument(
            "--request_date_ms",
            dest="request_date_ms",
            help="Execution datetime millis.",
        )

        # 'bot_instance_logging' subparser
        logging_parser = subparsers.add_parser("bot_instance_logging")
        logging_parser.add_argument(
            "-i", "--instance_id", dest="app_instance_id", help="Bot Instance ID"
        )
        logging_parser.add_argument(
            "-f", "--execution_flow", dest="execution_flow", help="Execution Flow"
        )
        set_cloudwatch_logging_group = logging_parser.add_argument_group(
            Color.BOLD + "Enable or Disable Cloudwatch Logging" + Color.END
        )
        set_cloudwatch_logging_group.add_argument(
            "--set_cloudwatch_logging",
            dest="set_cloudwatch_logging",
            action="store_true",
            help="Enable/disable logging in AWS CloudWatch for a specific bot instance and flow.",
        )
        set_cloudwatch_logging_group.add_argument(
            "-s",
            "--status",
            dest="logging_status",
            help="Logging Status. 1 to enable, 2 to disable.",
        )
        set_cloudwatch_logging_group.add_argument(
            "-d",
            "--end_date_ms",
            dest="logging_end_date_ms",
            help="Logging end date in milliseconds.
Required if status=1.", ) get_cloudwatch_log_info_group = logging_parser.add_argument_group( Color.BOLD + "Cloudwatch Log Info" + Color.END ) get_cloudwatch_log_info_group.add_argument( "--get_cloudwatch_log_info", dest="get_cloudwatch_log_info", action="store_true", help="Get a description of the bot instance log stream in AWS CloudWatch.", ) # 'bot_instance_log_export' subparser logging_export_parser = subparsers.add_parser("bot_instance_log_export") set_cloudwatch_logging_group = logging_export_parser.add_argument_group( Color.BOLD + "Create an export task" + Color.END ) set_cloudwatch_logging_group.add_argument( "--create_an_export_task", dest="create_an_export_task", action="store_true", help="Create the export task to export the log stream.", ) set_cloudwatch_logging_group.add_argument( "-i", "--instance_id", dest="app_instance_id", help="Bot Instance ID" ) set_cloudwatch_logging_group.add_argument( "-f", "--execution_flow", dest="execution_flow", help="Execution Flow" ) set_cloudwatch_logging_group.add_argument( "-s", "--start_date_ms", dest="start_date_ms", help="Start date of log events to export.", ) set_cloudwatch_logging_group.add_argument( "-e", "--end_date_ms", dest="end_date_ms", help="End date of log events to export.", ) get_export_status_group = logging_export_parser.add_argument_group( Color.BOLD + "Get Export Status" + Color.END ) get_export_status_group.add_argument( "--get_export_status", dest="get_export_status", action="store_true", help="Inspect the status of an export task and obtain the pre-signed URL(s) to the exported files.", ) get_export_status_group.add_argument( "-t", "--task_id", dest="task_id", help="An export task ID" ) # 'engage_kit' parser engage_kit_parser = subparsers.add_parser("engage_kit") engage_kit_parser.add_argument( "-l", "--lang", dest="language", help="Preferred content language" ) message_topics_group = engage_kit_parser.add_argument_group( Color.BOLD + "Message Topics" + Color.END ) message_topics_group.add_argument( "-b", "--bundle", dest="bundle_id", help="Bot bundle ID. The parameter is required if the bot instance ID is not specified.", ) message_topics_group.add_argument( "--update_message_topics_for_bundle", dest="update_message_topics", action="store_true", help="Update message topics for a given bot bundle.", ) message_topics_group.add_argument( "--get_message_topics", dest="get_message_topics", action="store_true", help="Get a list of existing message topics. 
If a bundle ID is provided, only the topics for that bundle are returned.", ) messages_group = engage_kit_parser.add_argument_group( Color.BOLD + "Messages" + Color.END ) messages_group.add_argument( "-j", "--json", dest="messages_json", help="Messages json content" ) messages_group.add_argument( "-t", "--topic_id", dest="topic_id", help="Topic ID" ) messages_group.add_argument( "-s", "--start_date", dest="messages_start_date", help="Start date to filter by delivery date.", ) messages_group.add_argument( "-e", "--end_date", dest="messages_end_date", help="End date to filter by delivery date", ) messages_group.add_argument( "-r", "--read_status", dest="messages_read_status", help="Message read status", ) messages_group.add_argument( "-m", "--message_id", dest="message_id", help="Message ID" ) messages_group.add_argument( "--create_messages", dest="create_messages", action="store_true", help="Create new messages at a location.", ) messages_group.add_argument( "--update_messages", dest="update_messages", action="store_true", help="Update message statuses and delivery times.", ) messages_group.add_argument( "--get_messages", dest="get_messages", action="store_true", help="Get messages at a location.", ) messages_group.add_argument( "--update_message_read_status", dest="update_message_read_status", action="store_true", help="Mark a message as read or unread.", ) # Process arguments args = parser.parse_args() sys.argv = [] import uuid session_id = str(uuid.uuid4()).split("-")[-1] global playback_session_id playback_session_id = session_id if args.playback: logger_session_id = session_id else: logger_session_id = None global _bot_logger_config _bot_logger_config = { "level": LOGGING_LEVEL_DICT[args.loglevel], "console_mode": True, "filename": args.logfile, "playback": args.playback, "session_id": logger_session_id, } global _bot_loggers _bot_loggers = {"botengine": _create_logger("botengine", **_bot_logger_config)} if args.help: parser.print_help() return 0 if args.actions == "distribution": if args.tag_release: tag_release( _bot_loggers["botengine"], args.new_version, args.core_directory ) return username = args.username password = args.password botname = args.run_bundle_id instance = args.run_instance_id server = args.server challenge_id = args.challenge_id commit = args.commit_bundle_id update = args.update publish = args.publish_bundle_id reject = args.reject_bundle_id httpdebug = args.httpdebug listdevices = args.list_devices listapps = args.listapps botinfo = args.info_bundle_id botstats = args.stats_bundle_id boterrors = args.errors_bundle_id botlogs = args.logs_bundle_id forever = args.json is None download_device_id = args.download_device_id download_device_type = args.download_device_type appstore_view_bundle_id = args.view_bundle_id my_purchased_bots = args.my_purchased_bots delete_bot_instance_id = args.delete_bot_instance_id purchase_bundle_id = args.purchase_bundle_id configure_bot_instance_id = args.configure_bot_instance_id pause_bot_instance_id = args.pause_bot_instance_id play_bot_instance_id = args.play_bot_instance_id organization_id = args.organization_id permissions_bot_instance_id = args.permissions_bot_instance_id devicetypes = args.device_types parameter = args.parameter user_key = args.user_key record = args.record playback = args.playback beta_add_user_id = args.beta_add_user_id beta_purchase_bot = args.beta_purchase_bot beta_delete_user_id = args.beta_delete_user_id beta_test_bot = args.beta_bundle_id generate = args.generate_bot_bundle_id user_id = args.user_id 
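        # A note on the -j/--json input unpacked above and parsed below: when
        # supplied, the payload is expected to carry at least the "apiKey" and
        # "apiHost" elements (see the json.loads() handling further down).
        # Illustrative invocation only -- the bundle ID and key are placeholders:
        #
        #     botengine --run com.example.Bot -j '{"apiKey": "USER_API_KEY", "apiHost": "app.peoplepowerco.com"}'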
approve = args.approve_bundle_id admin_username = args.admin_username admin_password = args.admin_password save_states = args.save_states force_save_states = args.force_save_states save_priorities = args.save_priorities save_narratives = args.save_narratives location_id = args.location_id list_locations = args.list_locations sync_edge = args.sync_edge add_team_member = args.add_team_member delete_team_member = args.delete_team_member get_team_members = args.get_team_members if organization_id is not None: print( Color.BOLD + "Organization ID: {}".format(organization_id) + Color.END ) organization_id = int(organization_id) elif location_id is not None: print(Color.BOLD + "Location ID: {}".format(location_id) + Color.END) location_id = int(location_id) global _https_proxy _https_proxy = None if args.https_proxy is not None: _https_proxy = {"https": args.https_proxy} if args.zip is not None: if args.zip: global TAR TAR = False make_it_so = args.make_it_so if make_it_so is not None: commit = make_it_so publish = make_it_so if approve: publish = approve if httpdebug: # These lines enable debugging at httplib level (requests->urllib3->http.client) # You will see the REQUEST, including HEADERS and DATA, and RESPONSE with HEADERS but without DATA. # The only thing missing will be the response.body which is not logged. # You must initialize logging, otherwise you'll not see debug output. logging.basicConfig() logging.getLogger().setLevel(logging.DEBUG) requests_log = logging.getLogger("requests.packages.urllib3") requests_log.setLevel(logging.DEBUG) requests_log.propagate = True # local variables user_info = None botengine_key = None inputs = None if args.json is not None: json_input = args.json if json_input.startswith("'"): json_input = json_input[1:] if json_input.endswith("'"): json_input = json_input[:-1] try: inputs = json.loads(str(json_input)) botengine_key = inputs["apiKey"] server = inputs["apiHost"] except Exception as e: sys.stderr.write(f"Couldn't parse the JSON input data {e}\n") sys.stderr.write(json_input) sys.stderr.write("\n\n") return 1 if not server: server = DEFAULT_BASE_SERVER_URL if "http" not in server: server = "https://" + server # Normalize server var with api host response server = _get_cloud_api_server(server) print("Bot Server: " + server) if args.get_user_key: user_key = _login(server, username, password) print("User Key: {}".format(user_key)) return 0 if args.get_admin_key: if admin_username is None or admin_password is None: print( "Use --admin_username and --admin_password to perform this operation" ) return -1 admin_key = _login(server, admin_username, admin_password, admin=True) print("Admin Key: {}".format(admin_key)) return 0 if listdevices: ### Developer helper method: get a list of devices in your account ### if user_key is None: user_key = _login(server, username, password) _summarize_devices(server, user_key, location_id) return 0 if list_locations: if user_key is None: user_key = _login(server, username, password) user_info = _get_user_info(server, user_key) if "locations" not in user_info: print( Color.RED + "This user account does not have access to any locations.\n\n" + Color.END ) else: print(Color.BOLD + "Locations" + Color.END) print("-" * 50) for location_object in user_info["locations"]: name = "[No Name]" if 'name' in location_object: name = location_object['name'] print( "\t{}{}{}: {}".format( Color.BOLD, str(location_object["id"]), Color.END, name, ) ) print() return 0 if sync_edge: # Command any edge-computing capable gateways to immediately 
resynchronize their bots if user_key is None: user_key = _login(server, username, password) devices = _get_devices_from_location(server, user_key, location_id) synced = False for device in devices: if "type" in device: if device["type"] in EDGE_CAPABLE_GATEWAY_DEVICE_TYPES: print("=> Synchronizing {}...".format(device["id"])) synced = True _send_command( server, user_key, location_id, device["id"], "bot", None ) if not synced: print("=> No edge-computing capable gateways found.") print() return 0 if devicetypes: if user_key is None: user_key = _login(server, username, password) _summarize_device_types(server, user_key) return 0 if parameter: if user_key is None: user_key = _login(server, username, password) p = _get_parameters(server, user_key, parameter) if "deviceParams" not in p: print( Color.RED + "This parameter is not defined on this server." + Color.END ) else: p = p["deviceParams"][0] # Name print(Color.BOLD + Color.GREEN + parameter + Color.END) # Type - numeric or non-numeric if "numeric" in p: if p["numeric"]: print(Color.BOLD + "Type: " + Color.END + "Numeric values") else: print(Color.BOLD + "Type: " + Color.END + "Non-numeric values") # Units if "systemUnit" in p: print(Color.BOLD + "Units: " + Color.END + p["systemUnit"]) # Decimal place accuracy if "scale" in p: print( Color.BOLD + "Accuracy: " + Color.END + "Store up to " + str(p["scale"]) + " digits after the decimal" ) # Usage - measurements and/or commands profiled = False configured = False historical = None if "profiled" in p: profiled = p["profiled"] if "configured" in p: configured = p["configured"] if "historical" in p: historical = p["historical"] if profiled and configured: print(Color.BOLD + "Usage: " + Color.END + "Measurement & Command") elif profiled and not configured: print(Color.BOLD + "Usage: " + Color.END + "Measurement only") elif configured and not profiled: print(Color.BOLD + "Usage: " + Color.END + "Command only") # Storage if historical is not None: if historical == 0: print( Color.BOLD + "Storage: " + Color.END + "Current state only, no database history" ) elif historical == 1: print( Color.BOLD + "Storage: " + Color.END + "Update database history every 15 minutes or when the value changes more than 25%" ) elif historical == 2: print( Color.BOLD + "Storage: " + Color.END + "Update database history on every change" ) print("\n" + the_bot() + "Done!") return 0 if listapps: ### Developer helper method: get a list of your bots ### if user_key is None: user_key = _login(server, username, password) _summarize_apps(server, user_key) return 0 if beta_purchase_bot: # Purchase the bot as a beta tester if user_key is None: user_key = _login(server, username, password) bundle = beta_purchase_bot bundle = bundle.replace("/", "") j = _beta_purchase_bot(server, user_key, bundle, beta_add_user_id) print("Result: " + json.dumps(j, indent=2, sort_keys=True)) print() return 0 if beta_add_user_id or beta_delete_user_id: if beta_test_bot is None: sys.stderr.write("Need to also specify a --beta_test_bot bundle ID.\n") return 1 if user_key is None: user_key = _login(server, username, password) if beta_add_user_id: print( "Adding user ID " + str(beta_add_user_id) + " to beta test " + beta_test_bot + "..." 
                )
                _add_bot_beta_testers(server, user_key, beta_test_bot, beta_add_user_id)
            else:
                _delete_bot_beta_testers(
                    server, user_key, beta_test_bot, beta_delete_user_id
                )
            # Fall through to print out the current list of beta testers

        if beta_test_bot:
            if user_key is None:
                user_key = _login(server, username, password)
            j = _get_bot_beta_testers(server, user_key, beta_test_bot)
            if "betaTesters" in j:
                if len(j["betaTesters"]) == 0:
                    print("There are no beta testers for this bot.")
                else:
                    if len(j["betaTesters"]) == 1:
                        print("There is one beta tester for this bot.")
                    else:
                        print(
                            "There are "
                            + str(len(j["betaTesters"]))
                            + " beta testers for this bot."
                        )
                    for tester in j["betaTesters"]:
                        print("\t* User ID: " + str(tester["userId"]))
            # print("Beta testers: " + json.dumps(j, indent=2, sort_keys=True))
            print()
            return 0

        if user_id:
            if user_key is None:
                user_key = _login(server, username, password)
            user_info = _get_user_info(server, user_key)
            print(
                "User ID: {}{}{}\n".format(
                    Color.BOLD, user_info["user"]["id"], Color.END
                )
            )
            return 0

        if botinfo:
            ### Developer helper method: get the details of your given bot bundle ###
            if user_key is None:
                user_key = _login(server, username, password)
            for bundle in botinfo:
                bundle = bundle.replace("/", "")
                try:
                    j = _get_apps(server, user_key, bundle)
                except Exception as e:
                    _bot_loggers["botengine"].warning(
                        "Failed to gather Info: {}".format(e)
                    )
                    continue
                bots = j.get("apps")
                if bots is None:
                    sys.stderr.write(
                        "The bot '"
                        + bundle
                        + "' does not belong to you, or you haven't created it yet.\n"
                    )
                    return 1
                sys.stderr.write(
                    Color.BOLD + "\nAPP MARKETING INFO ON THE SERVER\n" + Color.END
                )
                sys.stderr.write(json.dumps(bots[0], indent=2, sort_keys=True))
                sys.stderr.write("\n\n")
                j = _get_versions(server, user_key, bundle)
                versions = j.get("versions")
                if versions is None:
                    sys.stderr.write(
                        Color.RED
                        + "The bot '"
                        + bundle
                        + "' does not have any version!"
                        + Color.END
                    )
                    sys.stderr.write(
                        Color.RED
                        + "\nUse 'botengine --commit "
                        + bundle
                        + "' to commit your first version.\n\n"
                        + Color.END
                    )
                    return 1
                sys.stderr.write(
                    Color.BOLD + "\nAPP VERSION INFO ON THE SERVER\n" + Color.END
                )
                sys.stderr.write(json.dumps(versions, indent=2, sort_keys=True))
                sys.stderr.write("\n\n")
            return 0

        if botstats:
            ### Developer helper method: get the statistics of your given bot bundle ###
            if user_key is None:
                user_key = _login(server, username, password)
            for bundle in botstats:
                bundle = bundle.replace("/", "")
                print("-" * 50)
                print("- {}".format(bundle))
                try:
                    stats = _get_app_statistics(server, user_key, bundle)
                except Exception as e:
                    _bot_loggers["botengine"].warning(
                        "Failed to gather Stats: {}".format(e)
                    )
                    continue
                _bot_loggers["botengine"].debug(
                    "Stats: {}".format(json.dumps(stats, indent=2))
                )
                TOTAL_STARS = 20
                ordered_keys = ["star5", "star4", "star3", "star2", "star1"]
                if "rating" in stats:
                    print(Color.BOLD + "RATINGS" + Color.END)
                    print(
                        "Average rating across all versions: "
                        + str(stats["rating"]["average"])
                    )
                    stars_dict = stats["rating"]
                    if stars_dict["total"] > 0:
                        for star_key in ordered_keys:
                            stars = int(
                                (float(stars_dict[star_key]) / float(stars_dict["total"]))
                                * TOTAL_STARS
                            )
                            title = (
                                Color.BOLD
                                + star_key.replace("star", "")
                                + "-stars"
                                + Color.END
                            )
                            print(
                                title
                                + ": ["
                                + "*" * stars
                                + " " * (TOTAL_STARS - stars)
                                + "]"
                            )
                versions_dict = {}
                if "versions" in stats:
                    for v in stats["versions"]:
                        versions_dict[int(v["creationDateMs"])] = v

                i = 0
                for key in sorted(versions_dict.keys(), reverse=False):
                    i += 1
                    print("-" * 50)
                    if i == len(versions_dict.keys()):
                        print(Color.GREEN + Color.BOLD + "NEWEST VERSION" + Color.END)
                    print(
                        Color.BOLD
                        + "VERSION "
                        + str(versions_dict[key]["version"])
                        + Color.END
                    )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Creation Date: "
                        + Color.END
                        + str(versions_dict[key]["creationDate"])
                    )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Average Execution Time: "
                        + Color.END
                        + Color.PURPLE
                        + str(versions_dict[key]["averageExecutionTime"])
                        + " [ms]"
                        + Color.END
                    )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Total Executions: "
                        + Color.END
                        + str(versions_dict[key]["totalExecutions"])
                    )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Failed Executions: "
                        + Color.END
                        + str(versions_dict[key]["failedExecutions"])
                    )
                    if float(versions_dict[key]["totalExecutions"]) > 0:
                        print(
                            "\t"
                            + Color.BOLD
                            + "Failure Rate: "
                            + Color.END
                            + "%.2f"
                            % (
                                100
                                * (
                                    float(versions_dict[key]["failedExecutions"])
                                    / float(versions_dict[key]["totalExecutions"])
                                )
                            )
                            + "%"
                        )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Current Status: "
                        + Color.END
                        + str(VERSION_STATUS_DICT[versions_dict[key]["status"]])
                    )
                    print(
                        "\t"
                        + Color.BOLD
                        + "Status updated on: "
                        + Color.END
                        + str(versions_dict[key]["statusChangeDate"])
                    )
                    print()
                    if "rating" in versions_dict[key]:
                        stars_dict = versions_dict[key]["rating"]
                        if stars_dict["total"] > 0:
                            for star_key in ordered_keys:
                                stars = int(
                                    (
                                        float(stars_dict[star_key])
                                        / float(stars_dict["total"])
                                    )
                                    * TOTAL_STARS
                                )
                                title = star_key.replace("star", "") + "-stars"
                                print(
                                    "\t"
                                    + title
                                    + ": ["
                                    + "*" * stars
                                    + " " * (TOTAL_STARS - stars)
                                    + "]"
                                )
                        else:
                            print("\tNo ratings.")
                    print("\n")

                print(Color.GREEN + Color.BOLD + "TOTAL STATISTICS" + Color.END)
                if "totalCurrentUsers" in stats:
                    print(
                        Color.BOLD
                        + "Total current users: "
                        + Color.END
                        + str(stats["totalCurrentUsers"])
                    )
                if "totalCurrentInstances" in stats:
                    print(
                        Color.BOLD
                        + "Total current bot instances: "
                        + Color.END
                        + str(stats["totalCurrentInstances"])
                    )
                if "totalExecutions" in stats:
                    print(
                        Color.BOLD
                        + "Total executions: "
                        + Color.END
                        + str(stats["totalExecutions"])
                    )
                if "totalExecutionTime" in stats:
                    print(
Color.BOLD + "Total execution time: " + Color.END + str(float(stats["totalExecutionTime"]) / 1000.0) + " [sec]; %.2f" % (float(stats["totalExecutionTime"]) / 1000.0 / 60.0 / 60.0) + " [hours]; %.2f" % ( float(stats["totalExecutionTime"]) / 1000.0 / 60.0 / 60.0 / 24.0 ) + " [days]; %.2f" % ( float(stats["totalExecutionTime"]) / 1000.0 / 60.0 / 60.0 / 24.0 / 30.0 ) + " [months]" ) print() return 0 # if listversions: # ### Developer helper method: get a list of your bots ### # user_key = _login(server, username, password) # _summarize_versions(server, key) # return 0 if boterrors: if user_key is None: user_key = _login(server, username, password) for bundle in boterrors: bundle = bundle.replace("/", "") exists = False try: j = _get_bot_errors(server, user_key, bundle, developer=False) print(Color.BOLD + "\n\nPUBLIC VERSION" + Color.END) print(json.dumps(j, indent=2, sort_keys=True)) exists = True except Exception as e: print(f"Error getting public version {e}") pass try: j = _get_bot_errors(server, user_key, bundle, developer=True) print(Color.BOLD + "\n\nDEVELOPER VERSION" + Color.END) print(json.dumps(j, indent=2, sort_keys=True)) exists = True except Exception as e: print(f"Error getting developer version {e}") pass if not exists: print( Color.BOLD + Color.RED + "\n\nThat bot does not exist or you do not have access." + Color.END ) # =================================================================== # for r in j['results']: # print(Color.BOLD + "Bot: " + Color.END + str(r['bundle']) + "; Version " + str(r['version'])) # print(Color.BOLD + "Instance ID: " + Color.END + str(r['appInstanceId'])) # print(Color.BOLD + "Timestamp: " + Color.END + str(r['requestDate'])) # # print("") # print(str(r['executionLog'])) # print("\n\n") # =================================================================== return 0 if botlogs: if user_key is None: user_key = _login(server, username, password) for bundle in botlogs: bundle = bundle.replace("/", "") try: j = _get_bot_errors(server, user_key, bundle, errors_only=False) except Exception as e: _bot_loggers["botengine"].warning( "Failed to gather Logs: {}".format(e) ) continue print(json.dumps(j, indent=2, sort_keys=True)) # =================================================================== # for r in j['results']: # print(Color.BOLD + "Bot: " + Color.END + str(r['bundle']) + "; Version " + str(r['version'])) # print(Color.BOLD + "Instance ID: " + Color.END + str(r['appInstanceId'])) # print(Color.BOLD + "Timestamp: " + Color.END + str(r['requestDate'])) # # print("") # print(str(r['executionLog'])) # print("\n\n") # =================================================================== return 0 if appstore_view_bundle_id: # View a particular bot on the bot store if user_key is None: user_key = _login(server, username, password) appstore_view_bundle_id = appstore_view_bundle_id.replace("/", "") app_info = _botstore_botinfo(server, user_key, appstore_view_bundle_id) sys.stderr.write(json.dumps(app_info, indent=2, sort_keys=True)) sys.stderr.write("\n\n") return 0 if my_purchased_bots: # Get a list of my purchased bots if organization_id is not None: if user_key is None: if admin_username is None or admin_password is None: print( "Use --admin_username and --admin_password to perform this operation" ) return -1 user_key = _login( server, admin_username, admin_password, admin=True ) else: if user_key is None: user_key = _login(server, username, password) bots = _botstore_mybots( server, user_key, location_id=location_id, organization_id=organization_id, ) # 
print("BOTS: {}".format(json.dumps(bots, indent=2, sort_keys=True))) if bots is not None: for bot in bots: status = Color.RED + "Unknown status" + Color.END development = False if "development" in bot: development = bot["development"] if development: developer_text = Color.RED + "[DEVELOPMENT VERSION]" + Color.END else: developer_text = ( Color.GREEN + "[PRODUCTION VERSION]" + Color.END ) if bot["status"] == 0: status = Color.RED + "NOT CONFIGURED" + Color.END elif bot["status"] == 1: status = Color.GREEN + "ACTIVE" + Color.END if "active" in bot: if not bot["active"]: status = ( Color.RED + "PAUSED (--play to resume)" + Color.END ) elif bot["status"] == 2: status = Color.RED + "PAUSED (--play to resume)" + Color.END bundle = bot.get("bundle") if bundle is None: bundle = bot.get("bot", {}).get("bundle") version = bot.get("version") if not isinstance(version, str): version = version.get("version") print( "Bot Instance " + str(bot["appInstanceId"]) + ": " + bundle + "; Version " + version + " " + developer_text + "; " + status ) else: sys.stderr.write( Color.RED + "You have not obtained or purchased any bots." + Color.END ) print("\n" + the_bot() + "Done!") return 0 if pause_bot_instance_id: # Stop an bot instance from actively executing in your account if user_key is None: user_key = _login(server, username, password) try: _botstore_mybots( server, user_key, pause_bot_instance_id, location_id=location_id, organization_id=organization_id, ) except BotError: bundle = pause_bot_instance_id print("Trying bundle " + bundle) bundle = bundle.replace("/", "") pause_bot_instance_id = _get_instance_id_from_bundle_id( server, user_key, bundle ) if pause_bot_instance_id is None: sys.stderr.write( Color.RED + "This bot instance is not in your personal account.\n\n" + Color.END ) return 1 print( "Found bot instance " + Color.BOLD + str(pause_bot_instance_id) + Color.END + " matching the bundle ID you provided" ) result = _botstore_configure( server, user_key, pause_bot_instance_id, None, STATUS_BOT_INACTIVE, location_id=location_id, ) if result: print(the_bot() + "Paused!") return 0 else: print("Something went wrong during configuration.") return 1 if play_bot_instance_id: # Resume executing an bot in your account if user_key is None: user_key = _login(server, username, password) try: current_app_configuration = _botstore_mybots( server, user_key, play_bot_instance_id, location_id=location_id, organization_id=organization_id, ) except Exception: bundle = play_bot_instance_id print("Trying bundle " + play_bot_instance_id) bundle = bundle.replace("/", "") play_bot_instance_id = _get_instance_id_from_bundle_id( server, user_key, bundle, location_id=location_id ) if play_bot_instance_id is None: sys.stderr.write( Color.RED + "This bot instance is not in your personal account.\n\n" + Color.END ) return 1 print( "Found bot instance " + Color.BOLD + str(play_bot_instance_id) + Color.END + " matching the bundle ID you provided" ) result = _botstore_configure( server, user_key, play_bot_instance_id, None, STATUS_BOT_ACTIVE, location_id=location_id, ) if result: print(the_bot() + "Resuming execution!") return 0 else: print("Something went wrong during configuration.") return 1 if delete_bot_instance_id: if user_key is None: user_key = _login(server, username, password) if _botstore_deletebot( server, user_key, delete_bot_instance_id, location_id ): print( the_bot() + "Bot instance " + str(delete_bot_instance_id) + " deleted!" 
) else: delete_bot_instance_id = delete_bot_instance_id.replace("/", "") delete_bot_instance_id = _get_instance_id_from_bundle_id( server, user_key, delete_bot_instance_id, location_id=location_id ) print( "Found bot instance " + Color.BOLD + str(delete_bot_instance_id) + Color.END + " matching the bundle ID you provided" ) if _botstore_deletebot( server, user_key, delete_bot_instance_id, location_id ): print( the_bot() + "Bot instance " + str(delete_bot_instance_id) + " deleted!" ) else: print( Color.RED + "That bot instance is not in your account." + Color.END ) sys.stderr.write("\n\n") return 0 # QUESTIONS if args.questions_bot_instance_id: if user_key is None: user_key = _login(server, username, password) instance_id = None try: instance_id = int(args.questions_bot_instance_id) except ValueError: bundle = args.questions_bot_instance_id print("Trying bundle " + bundle) bundle = bundle.replace("/", "") instance_id = _get_instance_id_from_bundle_id( server, user_key, bundle, location_id=location_id ) if instance_id is None: sys.stderr.write( Color.RED + "This bot instance is not in your personal account.\n\n" + Color.END ) return 1 print( "Found bot instance " + Color.BOLD + str(instance_id) + Color.END + " matching the bundle ID you provided" ) all_questions = False while True: if all_questions: response = _get_questions( server, user_key, answer_status=(1, 2, 3, 4), location_id=location_id, ) else: response = _get_questions( server, user_key, instance_id=instance_id, answer_status=(1, 2, 3, 4), location_id=location_id, ) if "questions" not in response: sys.stderr.write( Color.RED + "\n\nThis bot has asked no questions." + Color.END ) print( "QUESTIONS: " + str(json.dumps(response, indent=2, sort_keys=True)) ) questions = [] if "questions" in response: questions = response["questions"] # Front page questions are just a list front_page_questions = [] # Editable questions are a dictionary of lists, with the dictionary element being the section ID editable_questions = {} # Questions by ID questions_by_id = {} # Question ID question_id = 0 for q in questions: if "front" in q: if q["front"]: front_page_questions.append(q) questions_by_id[question_id] = q question_id += 1 if "editable" in q: if q["editable"]: if q["sectionId"] not in editable_questions: editable_questions[q["sectionId"]] = [] editable_questions[q["sectionId"]].append(q) for section_id in sorted(editable_questions.keys()): editable_questions[section_id].sort( key=lambda x: x["questionWeight"], reverse=False ) # print("Section " + str(section_id) + ": \n" + json.dumps(editable_questions[section_id], indent=2, sort_keys=True)) if "sectionTitle" in editable_questions[section_id][0]: print( "\n" + Color.BOLD + editable_questions[section_id][0]["sectionTitle"] + Color.END ) for q in editable_questions[section_id]: print( "[" + Color.GREEN + str(question_id) + Color.END + "] : " + q["question"] ) questions_by_id[question_id] = q question_id += 1 print("\n\n" + Color.BOLD + "Settings" + Color.END) if all_questions: print( "[" + Color.PURPLE + "B" + Color.END + "] : Show only the questions for the bot instance you selected." ) else: print( "[" + Color.PURPLE + "A" + Color.END + "] : Show all questions, not filtered by a bot instance." 
) print("[" + Color.PURPLE + "Enter" + Color.END + "] : Exit.") choice = input("\nSelect a question ID to answer (ENTER to quit): ") if choice == "": print() exit(0) if choice.lower() == "a": all_questions = True continue elif choice.lower() == "b": all_questions = False continue try: choice = int(choice) except ValueError: print(Color.RED + "Please type a number." + Color.END) continue _print_question(questions_by_id[choice]) answer = input("\nYour answer (ENTER to skip): ") if answer == "": print() continue else: _answer_question( server, user_key, questions_by_id[choice], answer, location_id ) return if commit: ### Commit your bot to the cloud ### import shutil if len(commit.split(".")) != 3: sys.stderr.write( Color.RED + "Your new bot name must conform to reverse domain-name notation, as in 'com.yourname.BotName'" + Color.END ) sys.stderr.write("\n\n") return 1 bundle = commit.replace("/", "") base_path = os.path.join(os.getcwd(), ".precommit_" + bundle) temporary_bot_directory = None _merge_redirects( os.path.join(os.getcwd(), commit), base_path, bundle, server, args.core_directory, ) if user_key is None: user_key = _login(server, username, password) marketing_file = os.path.join(base_path, MARKETING_FILENAME) version_file = os.path.join(base_path, RUNTIME_FILENAME) if not os.path.exists(marketing_file): sys.stderr.write(marketing_file + " does not exist") sys.stderr.write("\n\n") # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return 1 marketing_text = "" with open(marketing_file) as f: for line in f: line = line.strip() if not line.startswith("#"): marketing_text += line try: marketing_data = json.loads(marketing_text) except Exception as e: sys.stderr.write( Color.RED + f"Your 'marketing.json' file isn't fully JSON-compliant. 
{e}\n" + Color.END ) sys.stderr.write( Color.RED + "Make sure all quotations are closed, and that your commas are not too many or too few.\n" + Color.END ) sys.stderr.write( Color.RED + "How about taking it over to a JSON validator, like http://jsonlint.com or https://jsonformatter.curiousconcept.com\n\n" + Color.END ) # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return 1 try: billing_bot = marketing_data["app"]["type"] == 4 except KeyError: billing_bot = False if not billing_bot: if not os.path.exists(version_file): sys.stderr.write(version_file + " does not exist") sys.stderr.write("\n\n") # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return 1 version_text = "" with open(version_file) as f: for line in f: line = line.strip() if not line.startswith("#"): version_text += line try: version_data = json.loads(version_text) except Exception as e: sys.stderr.write( Color.RED + f"Your RUNTIME_FILENAME file isn't fully JSON-compliant {e}.\n" + Color.END ) sys.stderr.write( Color.RED + "Make sure all quotations are closed, and that your commas are not too many or too few.\n" + Color.END ) sys.stderr.write( Color.RED + "How about taking it over to a JSON validator, like http://jsonlint.com or https://jsonformatter.curiousconcept.com" + Color.END ) # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return 1 bot_path = None try: _bot_filename = "_bot_" + bundle print("Uploading the marketing file...") _create_or_update_app(server, user_key, bundle, marketing_data) organizational_bot = False if "organizational" in marketing_data["app"]: organizational_bot = marketing_data["app"]["type"] == 1 runtime = DEFAULT_RUNTIME_PYTHON memory = DEFAULT_MEMORY timeout = DEFAULT_RUNTIME_TIMEOUT_S if not billing_bot: if "version" not in version_data: print( "Your runtime.json file is missing a 'version' element. That's bizarre. Please fix it." ) # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return -1 # Default to AWS Lambda if "runtime" not in version_data["version"]: version_data["version"]["runtime"] = DEFAULT_RUNTIME_PYTHON print("Uploading the runtime configuration...") # 'memory', 'runtime' and 'timeout' properties are moved to _upload_bot since Cloud 1.33 if _is_server_version_newer_than(server, 1, 33): if "runtime" in version_data["version"]: runtime = version_data["version"]["runtime"] del version_data["version"]["runtime"] if "memory" in version_data["version"]: memory = version_data["version"]["memory"] del version_data["version"]["memory"] if "timeout" in version_data["version"]: timeout = version_data["version"]["timeout"] del version_data["version"]["timeout"] _update_latest_version(server, user_key, bundle, version_data) else: # Billing bot print("🚀🚀🚀🌕") print("Generating the bot...") # Generate a new directory aws_lambda = True # If we're running on AWS Lambda, then the developer's folder goes into the base temporary directory # If we're running on a Docker server, then the developer's folder goes into the /content directory temporary_bot_directory = "." 
+ os.sep + ".bot_" + bundle bot_subdirectory = "" if not aws_lambda: bot_subdirectory = os.sep + "content" # Remove the last commit if os.path.isdir(temporary_bot_directory): shutil.rmtree(temporary_bot_directory, ignore_errors=True) # Ignore these files ignore_list = [ ".botignore", ".DS_Store", "icon.png", ".redirect", "__pycache__", ] botignore_file = base_path + os.sep + ".botignore" if os.path.isfile(botignore_file): with open(botignore_file) as f: for line in f: if not line.startswith("#") and line.strip() != "": ignore_list.append(line.strip()) print( "Ignoring files (add more in your .botignore file): \n" + str(ignore_list) ) destination = temporary_bot_directory + bot_subdirectory # Copy the developer's bot directory into our temporary directory shutil.copytree( base_path, temporary_bot_directory + bot_subdirectory, ignore=shutil.ignore_patterns(*ignore_list), dirs_exist_ok=True ) # AWS Lambda requires us to also install 3rd party python packages if aws_lambda: pip_install = [] pip_install = list( set(pip_install) | set( _extract_packages( temporary_bot_directory + bot_subdirectory, True ) ) ) pip_install_remotely = prepare_dependencies( _extract_packages( temporary_bot_directory + bot_subdirectory, False ) ) print("Locally installed packages: " + str(pip_install)) print("Remotely installed packages: " + str(pip_install_remotely)) # Install the packages yourself locally if len(pip_install): import subprocess command_line = [sys.executable, "-m", "pip", "install"] command_line.extend(pip_install) command_line.extend(["-t", temporary_bot_directory]) subprocess.check_output(command_line) # Copy required files into the temporary directory _merge_botengine(temporary_bot_directory, aws_lambda=aws_lambda) if TAR: shutil.make_archive(_bot_filename, "tar", temporary_bot_directory) print("Generated {}".format(_bot_filename + ".tar")) size = os.path.getsize(_bot_filename + ".tar") else: shutil.make_archive(_bot_filename, "zip", temporary_bot_directory) size = os.path.getsize(_bot_filename + ".zip") print("Uploading the bot [{:.2f} MB]...".format(size / 1000000.0)) response = _upload_bot( server, user_key, bundle, _bot_filename, TAR, runtime=runtime, memory=memory, timeout=timeout, ) icon_path = os.path.join(base_path, "icon.png") if os.path.exists(icon_path): print("Uploading the icon...") _upload_icon(server, user_key, bundle, icon_path) else: print("Missing the icon...") if TAR: if "requestId" not in response: sys.stderr.write( Color.RED + "This bot was not uploaded properly.\n" + Color.END ) sys.stderr.write( Color.RED + "The response from the server was : " + json.dumps(response, indent=2, sort_keys=True) + "\n\n" + Color.END ) return -1 sys.stdout.write("Processing the bot at the server...") sys.stdout.flush() while True: status = _check_bot_processing( server, user_key, response["requestId"] ) if "resultCode" in status["result"]: if status["result"]["resultCode"] == 0: break else: if "resultCodeMessage" in status["result"]: # Bad result code so print the error message sys.stderr.write( Color.RED + "\n\n" + status["result"]["resultCodeMessage"] + "\n" + Color.END ) return -1 else: # This would be weird - you get a bad result code but no error message sys.stderr.write( Color.RED + "\n\nThis bot was not processed properly at the server.\n" + Color.END ) sys.stderr.write( json.dumps(status, indent=2, sort_keys=True) ) return -1 sys.stdout.write(".") sys.stdout.flush() time.sleep(1) sys.stdout.write("\n") sys.stdout.flush() except BotError as e: sys.stderr.write("BotEngine Error: 
{}".format(e)) sys.stderr.write("\n\n") # Remove the last commit if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) return 2 finally: if bot_path: os.remove(bot_path) # Remove the last commit if temporary_bot_directory is not None: if os.path.isdir(temporary_bot_directory): shutil.rmtree(temporary_bot_directory, ignore_errors=True) if base_path is not None: if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) if not KEEP_BOT_TAR_FILE: if os.path.exists(_bot_filename + ".tar"): os.remove(_bot_filename + ".tar") if make_it_so is None and not billing_bot: if location_id is None and organization_id is None: print(the_bot() + "Done!\n") return 0 bots = _botstore_mybots( server, user_key, location_id=location_id, organization_id=organization_id, ) if bots is not None: for bot in bots: if ( bot.get("bundle", bot.get("bot", {}).get("bundle")) == bundle ): # Cool beans. You already own this bot. Cya. print(the_bot() + "Done!") return 0 purchase_bundle_id = bundle choice = "n" if not organizational_bot: choice = input( "Purchase this bot into your personal account (y/n)? " ) # =================================================================== # else: # choice = input("Purchase this bot into organization " + str(organization_id) + " (y/n)? ") # # TODO: # 1. Ask if the user wants to select an organization to turn into a developer organization for this organizational bot # 2. Check the organization to see if it already purchased this bot # 3. If not, purchase the bot # =================================================================== if choice.lower() != "y": print(the_bot() + "Done!\n") return 0 # Fall through to purchase the bot you just committed but don't have in your account yet if purchase_bundle_id: if organization_id is not None: if user_key is None: if admin_username is None or admin_password is None: print( "Use --admin_username and --admin_password to perform this operation" ) return -1 user_key = _login( server, admin_username, admin_password, admin=True ) else: if user_key is None: user_key = _login(server, username, password) purchase_bundle_id = purchase_bundle_id.replace("/", "") bot_instance_id = _botstore_purchasebot( server, user_key, purchase_bundle_id, location_id=location_id, organization_id=organization_id, ) if bot_instance_id is not None: print( the_bot() + Color.BOLD + "Purchased bot instance ID: " + Color.GREEN + str(bot_instance_id) + Color.END + "\n" ) configure_bot_instance_id = bot_instance_id return 0 if configure_bot_instance_id: if user_key is None: user_key = _login(server, username, password) user_info = _get_user_info(server, user_key) bundle = "" try: full_configuration = _botstore_mybots( server, user_key, configure_bot_instance_id, location_id=location_id, organization_id=organization_id, ) current_app_configuration = full_configuration[0] bundle = current_app_configuration["bundle"] new_app_configuration = get_editable_bot_configuration( current_app_configuration ) except Exception: # Maybe the developer fed us a bundle ID instead of an bot instance ID. 
bundle = configure_bot_instance_id bundle = bundle.replace("/", "") configure_bot_instance_id = _get_instance_id_from_bundle_id( server, user_key, bundle, location_id=location_id ) if configure_bot_instance_id is None: sys.stderr.write( Color.RED + "This bot instance is not in your personal account.\n\n" + Color.END ) return 1 print( "Found bot instance " + Color.BOLD + str(configure_bot_instance_id) + Color.END + " matching the bundle ID you provided" ) full_configuration = _botstore_mybots( server, user_key, configure_bot_instance_id, location_id=location_id, organization_id=organization_id, ) current_app_configuration = full_configuration[0] new_app_configuration = get_editable_bot_configuration( current_app_configuration ) general_app_info = _botstore_botinfo( server, user_key, bundle, location_id=location_id ) devices = _get_devices_from_location(server, user_key, location_id) # print("GENERAL APP INFO: {}".format(json.dumps(general_app_info, indent=2, sort_keys=True))) is_organizational_app = False if "organizational" in general_app_info: is_organizational_app = general_app_info["organizational"] == 1 # Nickname print(Color.BOLD + "\n\nNICKNAME" + Color.END) try: nickname = current_app_configuration["nickname"] except KeyError: nickname = None if not nickname: try: nickname = current_app_configuration["name"] except KeyError: nickname = None print( "The bot's current nickname in your account is '" + Color.BOLD + str(nickname) + Color.END + "'." ) new_nickname = input( Color.GREEN + "Change the nickname, or press enter to keep the current nickname: " + Color.END ) if new_nickname: nickname = new_nickname new_app_configuration["app"]["nickname"] = nickname # Timezone. Keep the timezone as a plain string, because it is concatenated into the prompt below and serialized into the bot configuration. new_timezone = "US/Pacific" try: original_timezone = new_app_configuration["app"]["timezone"] except KeyError: original_timezone = None if original_timezone: print(Color.BOLD + "\n\nTIMEZONE" + Color.END) print( "The bot's current timezone is '" + Color.BOLD + original_timezone + Color.END + "'." ) change_timezone = input( Color.GREEN + "Update it to '" + Color.BOLD + new_timezone + Color.END + "'? (y/n): " + Color.END ) if change_timezone: if change_timezone.lower() != "y": new_timezone = original_timezone new_app_configuration["app"]["timezone"] = new_timezone # Access to locations, files, professional monitoring, and devices print(Color.BOLD + "\n\nMODE AND FILE PERMISSIONS" + Color.END) try: access_block = general_app_info["access"] except KeyError: access_block = None new_access_block = [] if access_block: for access in access_block: if access["category"] == 1: # Locations for location in user_info["locations"]: if location["id"] == location_id: print( "At your '" + Color.BOLD + location["name"] + Color.END + "' location:" ) print("This bot wants access to your modes") for r in access["reason"]: print("\t" + r + ": " + access["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} if (ok) and ok.lower() != "y": pass else: configured_access_block["category"] = access[ "category" ] configured_access_block["locationId"] = location[ "id" ] configured_access_block["trigger"] = access[ "trigger" ] configured_access_block["read"] = access["read"] configured_access_block["control"] = access[ "control" ] new_access_block.append(configured_access_block) pass elif access["category"] == 2: # Files print( "This bot wants to access your " + Color.BOLD + "Media Files" + Color.END + "."
) for r in access["reason"]: print("\t" + r + ": " + access["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} if (ok) and ok.lower() != "y": pass else: configured_access_block["category"] = access["category"] configured_access_block["trigger"] = access["trigger"] configured_access_block["read"] = access["read"] configured_access_block["control"] = access["control"] new_access_block.append(configured_access_block) pass elif access["category"] == 3: # Professional monitoring print( "This bot wants to access your " + Color.BOLD + "Professional Monitoring Services" + Color.END + "." ) for r in access["reason"]: print("\t" + r + ": " + access["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} if (ok) and ok.lower() != "y": pass else: configured_access_block["category"] = access["category"] configured_access_block["trigger"] = access["trigger"] configured_access_block["read"] = access["read"] configured_access_block["control"] = access["control"] new_access_block.append(configured_access_block) pass elif access["category"] == 5: # Challenges print( "This bot wants to access your " + Color.BOLD + "Challenges" + Color.END + "." ) for r in access["reason"]: print("\t" + r + ": " + access["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} if (ok) and ok.lower() != "y": pass else: configured_access_block["category"] = access["category"] configured_access_block["trigger"] = access["trigger"] configured_access_block["read"] = access["read"] configured_access_block["control"] = access["control"] new_access_block.append(configured_access_block) pass elif access["category"] == 6: # Rules print( "This bot wants to access your " + Color.BOLD + "Rules" + Color.END + "." ) for r in access["reason"]: print("\t" + r + ": " + access["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} if (ok) and ok.lower() != "y": pass else: configured_access_block["category"] = access["category"] configured_access_block["trigger"] = access["trigger"] configured_access_block["read"] = access["read"] configured_access_block["control"] = access["control"] new_access_block.append(configured_access_block) pass try: devices_block = general_app_info["deviceTypes"] except KeyError: devices_block = None if devices_block and not is_organizational_app: print(Color.BOLD + "\n\nDEVICE PERMISSIONS" + Color.END) for device_block in devices_block: for focused_device in devices: if focused_device["type"] == device_block["id"]: if "desc" in focused_device: print( "This bot wants to access your '" + Color.BOLD + focused_device["desc"] + Color.END + "'."
) else: print( "This bot wants to access your '" + Color.BOLD + "Unnamed Device" + Color.END + "' ({}).".format(focused_device["id"]) ) for r in device_block["reason"]: print("\t" + r + ": " + device_block["reason"][r]) ok = input( Color.GREEN + "\tEnter to accept, 'n' to opt-out: " + Color.END ) print("") configured_access_block = {} configured_access_block["category"] = 4 configured_access_block["deviceId"] = focused_device["id"] if (ok) and ok.lower() != "y": configured_access_block["excluded"] = True else: configured_access_block["excluded"] = False new_access_block.append(configured_access_block) try: communications_block = general_app_info["communications"] except KeyError: communications_block = None new_communications_block = [] if communications_block: print(Color.BOLD + "\n\nCOMMUNICATION PERMISSIONS" + Color.END) for comm in communications_block: destinations = [] if comm["email"]: destinations.append("email") if comm["msg"]: destinations.append("in-bot messages") if comm["push"]: destinations.append("push notifications") if comm["sms"]: destinations.append("sms") phrase = "" i = 0 for m in destinations: i = i + 1 if len(destinations) > 1: if i == len(destinations): phrase = phrase + "and " + m if i < len(destinations): phrase = phrase + m + ", " else: phrase = m if comm["category"] == 0: print( Color.GREEN + "This bot wants to send" + Color.BOLD + " you " + Color.END + Color.GREEN + phrase + ". Enter to accept, 'n' to opt-out: " + Color.END ) ok = input("> ") elif comm["category"] == 1: print( Color.GREEN + "This bot wants to send" + Color.BOLD + " your friends " + Color.END + Color.GREEN + phrase + ". Enter to accept, 'n' to opt-out: " + Color.END ) ok = input("> ") elif comm["category"] == 2: print( Color.GREEN + "This bot wants to send" + Color.BOLD + " your family " + Color.END + Color.GREEN + phrase + ". Enter to accept, 'n' to opt-out: " + Color.END ) ok = input("> ") elif comm["category"] == 3: print( Color.GREEN + "This bot wants to send" + Color.BOLD + " your community group " + Color.END + Color.GREEN + phrase + ". Enter to accept, 'n' to opt-out: " + Color.END ) ok = input("> ") elif comm["category"] == 4: print( Color.GREEN + "This bot wants to send" + Color.BOLD + " your admins " + Color.END + Color.GREEN + phrase + ". 
Enter to accept, 'n' to opt-out: " + Color.END ) ok = input("> ") if ok: if ok.lower() != "y": continue configured_comms_block = {} configured_comms_block["category"] = comm["category"] configured_comms_block["email"] = comm["email"] configured_comms_block["push"] = comm["push"] configured_comms_block["sms"] = comm["sms"] configured_comms_block["msg"] = comm["msg"] new_communications_block.append(configured_comms_block) pass new_app_configuration["app"]["access"] = new_access_block new_app_configuration["app"]["communications"] = new_communications_block new_app_configuration["app"]["status"] = STATUS_BOT_ACTIVE status = STATUS_BOT_ACTIVE # print("CONFIGURATION BODY: \n" + json.dumps(new_app_configuration, indent=2, sort_keys=True)) result = _botstore_configure( server, user_key, configure_bot_instance_id, new_app_configuration, status, location_id=location_id, ) if result: print(the_bot() + "Configured!") return 0 else: print("Something went wrong during configuration.") return 1 if permissions_bot_instance_id: if user_key is None: user_key = _login(server, username, password) bundle = "" try: full_configuration = _botstore_mybots( server, user_key, permissions_bot_instance_id, location_id=location_id, organization_id=organization_id, ) current_app_configuration = full_configuration[0] bundle = current_app_configuration["bundle"] new_app_configuration = get_editable_bot_configuration( current_app_configuration ) except Exception: # Maybe the developer fed us a bundle ID instead of a bot instance ID. bundle = permissions_bot_instance_id bundle = bundle.replace("/", "") permissions_bot_instance_id = _get_instance_id_from_bundle_id( server, user_key, bundle, location_id=location_id ) if permissions_bot_instance_id is None: sys.stderr.write( Color.RED + "This bot instance is not in your personal account.\n\n" + Color.END ) return 1 print( "Found bot instance " + Color.BOLD + str(permissions_bot_instance_id) + Color.END + " matching the bundle ID you provided" ) full_configuration = _botstore_mybots( server, user_key, permissions_bot_instance_id, location_id=location_id, organization_id=organization_id, ) current_app_configuration = full_configuration[0] new_app_configuration = get_editable_bot_configuration( current_app_configuration ) devices = _get_devices_from_location(server, user_key, location_id) # Locations print(Color.BOLD + "\nLOCATIONS" + Color.END) found = False for access in current_app_configuration["access"]: if access["category"] == 1: found = True permissions = "" if access["read"]: permissions += Color.GREEN + "r" + Color.END else: permissions += Color.RED + "-" + Color.END if access["control"]: permissions += Color.GREEN + "w" + Color.END else: permissions += Color.RED + "-" + Color.END if access["trigger"]: permissions += Color.GREEN + "x" + Color.END else: permissions += Color.RED + "-" + Color.END print(permissions + " Location " + str(access["locationId"])) if not found: print( Color.RED + "---" + Color.END + " This bot cannot access any of your Locations or modes" + Color.END ) # TODO - files are category 2, but I don't think we support it yet as of the time of this writing.
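# The permission strings printed in this section follow UNIX-style "rwx" notation:
# r = read access, w = control (write) access, x = trigger access.
# For example (hypothetical), "rw-" means the bot can read and control the
# resource but does not trigger off of it.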
# Call Center print(Color.BOLD + "\nPROFESSIONAL MONITORING" + Color.END) found = False for access in current_app_configuration["access"]: if access["category"] == 3: found = True permissions = "" if access["read"]: permissions += Color.GREEN + "r" + Color.END else: permissions += Color.RED + "-" + Color.END if access["control"]: permissions += Color.GREEN + "w" + Color.END else: permissions += Color.RED + "-" + Color.END if access["trigger"]: permissions += Color.GREEN + "x" + Color.END else: permissions += Color.RED + "-" + Color.END print(permissions + " Professional Monitoring Services") if not found: print( Color.RED + "---" + Color.END + " This bot cannot access professional monitoring services." ) # Devices print(Color.BOLD + "\nDEVICES" + Color.END) found = False for access in current_app_configuration["access"]: if access["category"] == 4: # This access block represents a device. Map the device ID to a name. for device in devices: if "deviceId" in access: if device["id"] == access["deviceId"]: found = True permissions = "" if access["read"]: permissions += Color.GREEN + "r" + Color.END else: permissions += Color.RED + "-" + Color.END if access["control"]: permissions += Color.GREEN + "w" + Color.END else: permissions += Color.RED + "-" + Color.END if access["trigger"]: permissions += Color.GREEN + "x" + Color.END else: permissions += Color.RED + "-" + Color.END print( ( permissions + " [" + access["deviceId"] + "] " + device["desc"] ) ) if not found: print( Color.RED + "---" + Color.END + " This bot cannot access any of your devices" + Color.END ) print("\n" + the_bot() + "Done!") return 0 if add_team_member: if user_key is None: user_key = _login(server, username, password) team = input("\nTeam name: ") if team == "": sys.stderr.write( Color.RED + "The team name is required to add a team member." + Color.END ) sys.stderr.write("\n\n") return 1 member = input("\nUser ID (integer): ") if member == "": sys.stderr.write( Color.RED + "We need a valid userId to add the team member." + Color.END ) sys.stderr.write("\n\n") return 1 try: _add_team_member(server, user_key, team, member) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 print("\n" + the_bot() + "Added a team member!") return 0 if delete_team_member: if user_key is None: user_key = _login(server, username, password) team = input("\nTeam name: ") if team == "": sys.stderr.write( Color.RED + "The team name is required to delete a team member." + Color.END ) sys.stderr.write("\n\n") return 1 member = input("User ID (integer): ") if member == "": sys.stderr.write( Color.RED + "We need a valid userId to delete the team member." 
+ Color.END ) sys.stderr.write("\n\n") return 1 try: _delete_team_member(server, user_key, team, member) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 print("\n" + the_bot() + "Deleted the team member!") return 0 if get_team_members: if user_key is None: user_key = _login(server, username, password) team = input("\nTeam name: ") bundle = input("Which bundle ID should we use: ") try: j = _get_team_members(server, user_key, team, bundle) teams = j.get("teams") if teams is None: if team is None or team == "": sys.stderr.write("You haven't created or joined any team yet.\n") else: sys.stderr.write( "The team '" + team + "' does not belong to you, or you haven't created it yet.\n" ) return 1 sys.stderr.write( Color.BOLD + "\nDEVELOPER TEAM INFO ON THE SERVER\n" + Color.END ) sys.stderr.write(json.dumps(teams, indent=2, sort_keys=True)) sys.stderr.write("\n\n") except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 return 0 if update: ### Update this local 'botengine' python application ### import subprocess print( "If asked, please provide your computer's password to install an update" ) subprocess.call( "curl -s https://raw.githubusercontent.com/peoplepower/botlab/master/installer.sh | sudo /bin/bash", shell=True, ) return 0 if publish: ### Publish the bot ### if make_it_so is None and approve is None: are_you_sure = input( "Are you sure you want to submit this bot for review to make it public? (y/n): " ) if are_you_sure.lower() != "y": print(the_bot() + "Ok, aborting.") return 0 if user_key is None: if username is not None and password is not None: user_key = _login(server, username, password) elif admin_username is None and password is None: user_key = _login(server, username, password) bundle = publish.replace("/", "") if user_key is not None: _update_version_status(server, user_key, bundle, 2, ignore_errors=True) try: if make_it_so is not None or approve is not None: if admin_username is None: admin_username = username if admin_password is None: admin_password = password admin_key = _login( server, admin_username, admin_password, admin=True ) print(the_bot() + "Submitted for review.") _update_version_status(server, admin_key, bundle, 3) print(the_bot() + "Under review.") _update_version_status(server, admin_key, bundle, 4) print(the_bot() + "Published. Done!\n") return 0 except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 print( the_bot() + "Awaiting review! You can always --reject this version if you need to make updates."
) return 0 if reject: ### Reject the bot ### if user_key is None: user_key = _login(server, username, password) bundle = reject.replace("/", "") try: _update_version_status(server, user_key, bundle, 6) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 print(the_bot() + "Developer rejected!") return 0 if download_device_id or download_device_type: # =================================================================== # try: # importlib.import_module("pandas") # except ImportError: # sys.stderr.write("Missing the 'pandas' module!\n") # sys.stderr.write("Please install this module by running 'pip install pandas'\n") # return 1 # =================================================================== ### Download CSV data - This is a developer helper function for analyzing large data sets ### if user_key is None: user_key = _login(server, username, password) days_ago = input("How many days ago to start collecting data: ") try: initialization_days = int(days_ago) except ValueError: initialization_days = 1 # Note: rely on the module-level 'import datetime' here; 'from datetime import datetime' would shadow it across main() and break datetime.date.today(). import dateutil.relativedelta start_date = datetime.date.today() + dateutil.relativedelta.relativedelta( days=-initialization_days ) if download_device_id: # Download a single device ID from your account. all_devices = _get_devices_from_location(server, user_key, location_id) for device in all_devices: if device["id"] == download_device_id: behavior = None if "goalId" in device: behavior = str(device["goalId"]) _downloaded_data_to_csv( server, user_key, start_date, download_device_id, str(device["type"]), str(device["desc"]), behavior, location_id=location_id, ) print(the_bot() + "Done!") return 0 print("\nThat device doesn't exist at this location.") return 0 if download_device_type: if organization_id: # Find all devices of the given type in the organization and extract device ID and user ID pairs # Loop through each device_id+user_id combo and download devices = _get_devices_from_organization( server, user_key, organization_id, download_device_type ) for device in devices["devices"]: print( "Downloading " + str(device["id"]) + " '" + str(device["desc"]) + "' from user " + str(device["user"]["id"]) + " ..." ) behavior = None if "goalId" in device: behavior = str(device["goalId"]) _downloaded_data_to_csv( server, user_key, start_date, device["id"], download_device_type, str(device["desc"]), behavior, location_id=location_id, user_id=device["user"]["id"], ) else: # Download all device IDs of a certain type from your account. devices = _get_devices_from_location(server, user_key, location_id) for device in devices: if int(device["type"]) == int(download_device_type): print( "Downloading " + str(device["id"]) + " - '" + str(device["desc"]) + "' ..." ) behavior = None if "goalId" in device: behavior = str(device["goalId"]) _downloaded_data_to_csv( server, user_key, start_date, device["id"], download_device_type, str(device["desc"]), behavior, location_id=location_id, ) print(the_bot() + " Done!") return 0 if record: ### Record location data for playback - This is a developer helper function for analyzing large data sets ### print( Color.BOLD + "We strongly recommend using the Maestro CLI tool to record data out of locations." + Color.END ) if location_id is None: print( "Please specify a location ID to record from with the --location argument."
) exit(0) if user_key is None: if organization_id is None: user_key = _login(server, username, password) else: if admin_username is None or admin_password is None: print( "You specified an organization ID, but not an admin username+password.\nUse --admin_username and --admin_password to perform this operation" ) return -1 user_key = _login( server, admin_username, admin_password, admin=True ) days_ago = input("How many days ago to record: ") # Parse the input before comparing it, so non-numeric input falls back to the default instead of crashing try: initialization_days = int(days_ago) except ValueError: initialization_days = 1 if initialization_days > 30: print( Color.RED + "Please use the Maestro CLI tool to safely download more than 30 days of data." + Color.END ) exit(0) destination = input("What directory should we save this into: ") if destination == "": destination = os.getcwd() elif not os.path.exists(destination): os.makedirs(destination) if organization_id is not None: # Organizational. Get a list of all locations in this organization. print("Downloading a list of locations ...") org_locations = _get_organization_locations( server, user_key, organization_id ) if len(org_locations) == 0: print("This organization has no locations.") exit(1) locations = [] print("Found {} locations.".format(len(org_locations))) for location in org_locations: id = location["id"] if "name" in location: name = location["name"] else: name = "Unnamed Location" locations.append((id, name)) else: # Non-organizational. Select the location we requested in the command line. location_info = _get_location_info(server, user_key, location_id) locations = [(location_id, location_info["name"])] device_properties = {} for location_id, name in locations: try: destination_directory = ( destination + os.sep + str(location_id) + " - '{}'".format( "".join( [ c for c in name if c.isalpha() or c.isdigit() or c == " " ] ).rstrip() ) ) if not os.path.exists(destination_directory): os.makedirs(destination_directory) filenames = [] import dateutil.relativedelta start_date = ( datetime.date.today() + dateutil.relativedelta.relativedelta( days=-initialization_days ) ) print("Downloading modes history ...") filenames.append( _download_modes_history_to_csv( server, user_key, location_id, start_date, destination_directory=destination_directory, ) ) # Record data from every (non-gateway) device at this location. print("Gathering device manifest ...") devices = _get_devices_from_location(server, user_key, location_id) # { "id": "description" } recorded_devices = {} for device in devices: if device.get("type", 0) in [ 22, 23, 24, 10031, 31, 37, 36, 35, 40, ]: # Ignore gateway/proxy device types continue print( "Capturing " + str(device["id"]) + " - '" + str(device["desc"]) + "' ..."
) behavior = None if "goalId" in device: behavior = str(device["goalId"]) device_property = _download_device_property( server, user_key, location_id, device["id"] ) device_properties[device["id"]] = device_property recorded_devices[device["id"]] = device["desc"] filenames += _downloaded_data_to_csv( server, user_key, start_date, device["id"], device["type"], str(device["desc"]), behavior, location_id=location_id, destination_directory=destination_directory, ) # File objects to close later open_files = {} # headers[filename] is a dictionary of parameter names and their current values for that file headers = {} for filename in filenames: headers[filename] = {} open_files[filename] = open(filename) headers[filename]["headers"] = ( open_files[filename].readline().replace("\n", "").split(",") ) for header in headers[filename]["headers"]: if header != "": headers[filename][header] = None values = ( open_files[filename].readline().replace("\n", "").split(",") ) for i in range(0, len(values)): if len(values) != len(headers[filename]["headers"]): print("# values != # headers for filename " + filename) print("values: " + str(values)) print("headers: " + str(headers[filename]["headers"])) headers[filename][headers[filename]["headers"][i]] = values[ i ].replace(COMMA_DELIMITER_REPLACEMENT_CHARACTER, ",") output_filename = os.path.join( destination_directory, "recording_location_{}-{}_days.json".format( location_id, initialization_days ), ) print("Writing " + output_filename + "...") with open(output_filename, "w") as out: out.write("{\n") out.write( '"location_info":' + json.dumps(location_info) + ",\n" ) out.write( '"device_properties":' + json.dumps(device_properties) + ",\n" ) out.write('"data":[\n') first_entry = True while True: oldest_timestamp_ms = None oldest_filename = None for filename in filenames: if "timestamp_ms" in headers[filename]: if ( headers[filename]["timestamp_ms"] is not None and headers[filename]["timestamp_ms"] != "None" ): if oldest_timestamp_ms is None: oldest_timestamp_ms = int( headers[filename]["timestamp_ms"] ) oldest_filename = filename elif ( int(headers[filename]["timestamp_ms"]) < oldest_timestamp_ms ): oldest_timestamp_ms = int( headers[filename]["timestamp_ms"] ) oldest_filename = filename if oldest_filename is None: break # Output this JSON content to the file output = headers[oldest_filename].copy() del output["headers"] if "" in output: del output[""] if first_entry: first_entry = False else: out.write(",\n") out.write(str(json.dumps(output))) # Get the next line out of the original file values = ( open_files[oldest_filename] .readline() .replace("\n", "") .split(",") ) headers[oldest_filename]["timestamp_ms"] = None for i in range(0, len(values)): headers[oldest_filename][ headers[oldest_filename]["headers"][i] ] = values[i].replace( COMMA_DELIMITER_REPLACEMENT_CHARACTER, "," ) out.write("\n]}\n") for file in open_files: open_files[file].close() # Extract a copy of each individual device for playback # We don't just want to do a json.dumps() because that makes the content unmanageable.
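# For reference, the merged recording written above is a k-way merge of the per-device CSVs:
# on each pass, the file holding the oldest unconsumed row (smallest "timestamp_ms") emits
# that row, until every file is exhausted. The per-device files exported below share the
# same JSON shape (abbreviated, hypothetical values):
# {"location_info": {...}, "device_properties": {...}, "data": [{"device_id": "...", "timestamp_ms": "...", ...}]}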
for device_id in recorded_devices: recording = json.load(open(output_filename)) for i in list(recording["data"]): if i["device_id"] != device_id: recording["data"].remove(i) device_output_filename = os.path.join( destination_directory, "{}-{}_recording.json".format( start_date.strftime("%Y.%m.%d"), recorded_devices[device_id], ), ) print("Exporting {}...".format(device_output_filename)) with open(device_output_filename, "w") as out: out.write("{\n") for key in recording: if key != "data": out.write( '"{}": {},\n'.format( key, json.dumps(recording[key]) ) ) out.write('"data": [\n') for index, line in enumerate(recording["data"]): out.write("{}".format(json.dumps(line))) if index < len(recording["data"]) - 1: out.write(",\n") else: out.write("\n") out.write("]}\n") if len(locations) > 1: # We have to let the server relax between downloads wait_time_seconds = 30 print( "\n\nDownloading the next location in {} seconds ...".format( wait_time_seconds ) ) time.sleep(wait_time_seconds) except Exception as e: import traceback print( "Location ID {} ('{}') had a problem. Continuing even though we received this exception: \n".format( location_id, name ), e, ) traceback.print_exc() print(the_bot() + " Done!") return 0 if playback: global playback_timestamp_ms global playback_timer_timestamp global playback_location_info global playback_device_properties global playback_device_measurements global playback_variables global playback_timezone global playback_states global playback_execution_datetime global playback_chat_completions start_timestamp_ms = round(time.time() * 1000) playback_notifications_log = "playback_{}_notifications.txt".format( playback_session_id ) playback_narratives_log = "playback_{}_narratives.txt".format( playback_session_id ) playback_states_log = "playback_{}_states.txt".format(playback_session_id) playback_timeseries_states_log = "playback_{}_timeseries_states.txt".format( playback_session_id ) playback_location_priorities_log = ( "playback_{}_location_priorities.txt".format(playback_session_id) ) try: os.remove(playback_notifications_log) except FileNotFoundError: pass except Exception as e: print("Error removing playback_notifications_log: ", e) pass try: os.remove(playback_narratives_log) except FileNotFoundError: pass except Exception as e: print("Error removing playback_narratives_log: ", e) pass try: os.remove(playback_states_log) except FileNotFoundError: pass except Exception as e: print("Error removing playback_states_log: ", e) pass try: os.remove(playback_timeseries_states_log) except FileNotFoundError: pass except Exception as e: print("Error removing playback_timeseries_states_log: ", e) pass try: os.remove(playback_location_priorities_log) except FileNotFoundError: pass except Exception as e: print("Error removing playback_location_priorities_log: ", e) pass try: importlib.import_module("ijson") except ImportError: sys.stderr.write("Missing the 'ijson' module!\n") sys.stderr.write( "Please install this module by running 'pip3 install ijson'\n" ) return 1 # Set random seed for consistency of runs import random random.seed(0) import ijson playback_json_file = None unzipped_file_name = None if zipfile.is_zipfile(playback): file_name = os.path.basename(playback).split(".")[0] playback_path = os.path.join(os.getcwd(), "playback_tmp") unzipped_file_name = os.path.join(playback_path, file_name) playback_file_directory = unzipped_file_name playback_json_files = [] extracted_filenames = [] with zipfile.ZipFile(playback, "r") as z: for extracted_filename in z.namelist(): 
z.extract(extracted_filename, unzipped_file_name) extracted_path = os.path.join( unzipped_file_name, extracted_filename ) if ( ".json" in extracted_filename and "__MACOSX" not in extracted_filename ): extracted_filenames.append(extracted_filename) playback_json_files.append(extracted_path) json_file_count = len(playback_json_files) if json_file_count > 0: if json_file_count == 1: playback_json_file = playback_json_files[0] else: if args.playback_options and args.playback_options == "merged": for i in range(json_file_count): if "merged" in playback_json_files[i]: playback_json_file = playback_json_files[i] break else: files = "" for i in range(json_file_count): files += "{}.{};\n".format( i + 1, extracted_filenames[i] ) user_enter = input( "Choose the recorded file you want to play back:\n{}\nEnter the number (1 by default): ".format( files ) ) if user_enter == "": playback_json_file = playback_json_files[0] elif user_enter.isdigit(): choose = int(user_enter) - 1 if -1 < choose < json_file_count: playback_json_file = playback_json_files[choose] else: print("Error: invalid number entered.") else: print("Error: invalid number entered.") else: print( "Error: there is no recorded JSON file that can be used for playback." ) if playback_json_file is None: if unzipped_file_name is not None: import shutil shutil.rmtree(unzipped_file_name) sys.exit(0) return -1 else: # Load the recording playback_json_file = playback playback_file_directory = os.path.dirname(playback) with open(playback_json_file, "r") as f: # We store content that goes into the access block in an easily updatable format before forming the real access block. raw_access_content = {} # Extract location information ijson_location_info = ijson.items(f, "location_info") for value in ijson_location_info: playback_location_info = value break with open(playback_json_file, "r") as f: # Extract device information ijson_device_properties = ijson.items(f, "device_properties") for value in ijson_device_properties: playback_device_properties = value break with open(playback_json_file, "r") as f: # Extract device measurement information ijson_device_measurements = ijson.items(f, "device_parameters") for value in ijson_device_measurements: playback_device_measurements = value break commit_location_id = None commit_bot_instance_id = None user_key = None admin_key = None analytic_key = None if force_save_states or save_states: if location_id is None: raise ValueError("You must specify a location ID to save states.") if admin_username is not None: admin_key = _login( server, admin_username, admin_password, admin=True ) if username is not None and password is not None: user_key = _login(server, username, password) if force_save_states: commit_location_id = location_id else: commit_location_id = location_id devices = _get_devices_from_location( server, admin_key, commit_location_id ) if devices: if unzipped_file_name is not None: import shutil shutil.rmtree(unzipped_file_name) print( "Error: please use a location that has no devices in it, because we don't want to disturb someone's real home" ) sys.exit(0) return -1 if save_priorities: if location_id is None: raise ValueError( "You must specify a location ID to save priorities." ) if instance is None: raise ValueError( "You must specify a bot instance ID to save priorities."
) if username is not None and password is not None: user_key = _login(server, username, password) commit_location_id = location_id commit_bot_instance_id = instance analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) if save_narratives: if location_id is None: raise ValueError( "You must specify a location ID to save narratives." ) if instance is None: raise ValueError( "You must specify a bot instance ID to save narratives." ) if username is not None and password is not None: user_key = _login(server, username, password) commit_location_id = location_id commit_bot_instance_id = instance analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) location_id = playback_location_info["id"] location_timezone = playback_location_info.get('timezone', {'id': 'US/Pacific'}) playback_timezone = location_timezone["id"] location_name = playback_location_info["name"] location_zip = None location_lat = None location_long = None if "zip" in playback_location_info: location_zip = playback_location_info["zip"] if "latitude" in playback_location_info: location_lat = playback_location_info["latitude"] if "longitude" in playback_location_info: location_long = playback_location_info["longitude"] # Generate and import the bot base_path = os.path.join( os.getcwd(), ".{}-playback_{}".format(botname, session_id) ) _merge_redirects( os.path.join(os.getcwd(), botname), base_path, botname, server, args.core_directory, ) sys.path.insert(0, base_path) bot = importlib.import_module("bot") # Grab the runtime.json content runtime_text = "" with open(os.path.join(base_path, RUNTIME_FILENAME)) as f: for line in f: line = line.strip() if not line.startswith("#"): runtime_text += line runtime = json.loads(runtime_text)["version"] # Location to trigger off of if runtime.get("trigger", 0) & 0x2 != 0: # Trigger off of location mode changes raw_access_content["location"] = { "category": 1, "control": True, "location": { "event": "HOME", "latitude": location_lat, "locationId": location_id, "longitude": location_long, "name": location_name, "timezone": location_timezone, "zip": location_zip, }, "read": True, "trigger": False, } # Device types to trigger off of device_type_triggers = [] # Keep track of the last parameters for this device so we can update them properly on the next measurement. Dictionary(device_id) of dictionaries(param names). 
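# For example (hypothetical values): device_id_params["abc123"]["temperature"] would hold
# the most recent measure dict for that parameter, such as {"deviceId": "abc123",
# "name": "temperature", "time": 1700000000000, "value": "21.5", "updated": True},
# so the next measurement can populate prevTime / prevValue correctly.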
device_id_params = {} for device in runtime.get("deviceTypes", []): device_type_triggers.append(device["id"]) botengine = BotEngine({"apiKey": None, "apiHosts": None}, playback=True) botengine._download_binary_variable = playback_download_binary_variable botengine.flush_binary_variables = playback_flush_binary_variables botengine.flush_commands = playback_flush_commands botengine.flush_rules = playback_flush_rules botengine.flush_tags = playback_flush_tags botengine.delete_all_rules = playback_delete_all_rules botengine._execute_again_at_timestamp = playback_execute_again_at_timestamp botengine.notify = playback_notify botengine.is_server_version_newer_than = ( playback_is_server_version_newer_than ) botengine.get_device_property = playback_get_device_property botengine.get_measurements = playback_get_measurements botengine.get_spaces = playback_get_spaces botengine.resynchronize_questions = playback_resynchronize_questions botengine.narrate = playback_narrate botengine._flush_states = playback_flush_states botengine.set_location_priority = playback_set_location_priority botengine.get_state = playback_get_states botengine.get_timeseries_state = playback_get_timeseries_state botengine.set_mode = playback_set_mode botengine.get_mode_history = playback_get_mode_history # botengine.request_data = playback_request_data botengine.send_request_for_chat_completion = ( playback_send_request_for_chat_completion ) # Run it # original_start_timestamp_ms = int(time.time() * 1000) original_timestamp_ms = None latest_timestamp_ms = None # The last playback timestamp we executed # Used to determine if we need to trigger a schedule previous_timestamp_ms = None playback_data_requests = None playback_data_requests_triggered = False with open(playback_json_file, "r") as f: ijson_requests_properties = ijson.items(f, "data_requests") for value in ijson_requests_properties: if playback_data_requests is None: playback_data_requests = value else: _bot_loggers["botengine"].debug( "Unused playback data requests: {}".format(value) ) botengine.get_logger(f"{'botengine'}").debug( "playback_data_requests={}".format(playback_data_requests) ) with open(playback_json_file, "r") as f: if "run" in dir(bot): did_start_playback = False datas = ijson.items(f, "data.item") # Add an artificial no-op trigger to the end of our data to force bots to execute all the way to the current time. if args.playback_to_now: # We select a positive number trigger that is so far out there it becomes future-proof and creates a no-op execution inside bot.py. 
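# (1 << 100) produces a trigger bitmask with only bit 100 set - far beyond any trigger
# bit defined today - so no microservice matches it and the bot simply advances its
# clock to the current time without doing any real work.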
ts_now = int(time.time() * 1000) # ijson.items() yields a streaming iterator with no append(), so chain the artificial trigger onto the end of the stream import itertools datas = itertools.chain( datas, [{"trigger": str(1 << 100), "timestamp_ms": str(ts_now)}] ) botengine.get_logger(f"{'botengine'}").debug( Color.BOLD + "Playing back the data to the current timestamp: {}".format( ts_now ) + Color.END ) time.sleep(1) last_trigger_data = {} triggered_device_ids = [] deferred_schedule_datetimes = [] from tqdm import tqdm for d in tqdm(datas): inputs = {} inputs["access"] = [] trigger = int(d["trigger"]) timestamp = int(d["timestamp_ms"]) timezone_str = None # Inject a data stream message before we begin, 'did_start_playback()' if not did_start_playback: # Form inputs to represent a data stream message did_start_playback = True datastream_inputs = { "dataStream": { "address": "did_start_playback", "feed": {}, }, "trigger": 256, "locationId": location_id, "time": timestamp, "access": [], } for access_id in raw_access_content: content = dict(raw_access_content[access_id]) content["trigger"] = False datastream_inputs["access"].append(content) _run( bot, {"inputs": [datastream_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) previous_timestamp_ms = latest_timestamp_ms latest_timestamp_ms = timestamp if original_timestamp_ms is None: original_timestamp_ms = timestamp if "location" in raw_access_content: if "prevEvent" in raw_access_content["location"]: del raw_access_content["location"]["prevEvent"] if "location" in raw_access_content["location"]: if ( "timezone" in raw_access_content["location"]["location"] ): if ( "id" in raw_access_content["location"]["location"][ "timezone" ] ): timezone_str = raw_access_content["location"][ "location" ]["timezone"]["id"] # Turn off all triggers until we find the right one to activate for access_id in raw_access_content: raw_access_content[access_id]["trigger"] = False dt_now = playback_get_datetime_from_timestamp( timestamp, timezone_str ) # print("" \ # "\n" \ # "\n=============== Playback Time Statistics ===============" \ # "\n" \ # "\n\tPlayback : {}" \ # "\n\tRunning : {}" \ # "\n\tDelta : {}" \ # "\n" \ # "\n========================================================" \ # "\n".format( # timestamp - original_timestamp_ms, # Running time in ms relative to data start time # int(time.time() * 1000) - original_start_timestamp_ms, # Running time in ms relative to current start time # (timestamp - original_timestamp_ms) - (int(time.time() * 1000) - original_start_timestamp_ms))) # Difference between the two if playback_execution_datetime is None: playback_execution_datetime = dt_now # Run on schedules defined by each microservice, if available # `schedules` is a key:value map of schedule_id to cron_expression, like this: # "schedules": { # "MIDNIGHT": "0 0 0 1/1 * ? *", # "HOUR": "0 0 0/1 1/1 * ? *" # } if ( "schedules" in runtime and len(runtime["schedules"]) > 0 and previous_timestamp_ms is not None ): import croniter # Enumerate all schedules and sort them by the next scheduled timestamp schedule_datetimes = [] # (schedule_id, next_datetime) for schedule_id in runtime["schedules"]: # Get the cron expression # Translate Quartz expressions (second minute hour day-of-month month day-of-week ?year) to cron expressions (minute hour day-of-month month day-of-week), e.g. "0 0 0/1 1/1 * ? *" -> "0 0/1 1/1 * *" cron_expression = " ".join( [ e for e in runtime["schedules"][schedule_id] .replace("?", "*") .split(" ")[1:][:5] ] ) botengine.get_logger(f"{'botengine'}").debug("Check Cron... 
=> {}".format(cron_expression)) cron = croniter.croniter(cron_expression) dt_previous = playback_get_datetime_from_timestamp( previous_timestamp_ms, timezone_str ) dt_next = cron.get_next( dt_previous.__class__, dt_previous ) botengine.get_logger(f"{'botengine'}").debug("ID == {}".format(schedule_id)) botengine.get_logger(f"{'botengine'}").debug("Previous => {}".format(dt_previous)) botengine.get_logger(f"{'botengine'}").debug("Now => {}".format(dt_now)) if dt_next > dt_now: continue # botengine.get_logger(f"{'botengine'}").debug("Next => {} - {}".format(dt_next, schedule_id)) # Incorporate variance in schedule triggers, which may get fired few minutes late import random from datetime import timedelta rand_minutes = random.randint(5, 35) botengine.get_logger(f"{'botengine'}").debug("Next (rand) => {} - {} ({})".format(dt_next, schedule_id, rand_minutes)) dt_deferred = dt_next + timedelta(minutes=rand_minutes) # If this schedule will happen in the future then store it for later # and ignore other schedules with the same ID until it fires if dt_deferred > dt_now: exists = False for schedule_id2, schedule_dt2, schedule_dt_original in deferred_schedule_datetimes: if schedule_id2 == schedule_id and schedule_dt_original == dt_next: exists = True break if exists: botengine.get_logger(f"{'botengine'}").debug("Deferred schedule already exists {} - {} > {}".format(schedule_id, dt_next, dt_deferred)) continue botengine.get_logger(f"{'botengine'}").debug("Deferred schedule {} - {} > {}".format(schedule_id, dt_next, dt_deferred)) deferred_schedule_datetimes.append((schedule_id, dt_deferred, dt_next)) continue schedule_datetimes.append((schedule_id, dt_deferred)) while dt_deferred <= dt_now: dt_previous = dt_deferred dt_deferred = cron.get_next( dt_previous.__class__, dt_previous ) dt_deferred = dt_deferred + timedelta(minutes=rand_minutes) if dt_deferred <= dt_now: botengine.get_logger(f"{'botengine'}").debug("Next => {} - {}".format(dt_deferred, schedule_id)) schedule_datetimes.append( (schedule_id, dt_deferred) ) # dt_next = dt_deferred + timedelta() # Include the earliest deferred schedule if it is ready to # be processed and compare with the other pending schedules if len(deferred_schedule_datetimes) > 0: deferred_schedule_datetimes.sort(key=lambda x: x[1]) if deferred_schedule_datetimes[0][1] <= dt_now: schedule_id2, schedule_dt2, schedule_dt_original = deferred_schedule_datetimes.pop(0) botengine.get_logger(f"{'botengine'}").debug("Deferred schedule is ready to be processed {} - {}".format(schedule_id2, schedule_dt2)) schedule_datetimes.append((schedule_id2, schedule_dt2)) schedule_datetimes.sort(key=lambda x: x[1]) executed_datetimes = [] botengine.get_logger(f"{'botengine'}").debug("Schedule datetimes: {}".format(schedule_datetimes)) # Enumerate all schedules processing_schedules = True while processing_schedules: botengine.get_logger(f"{'botengine'}").debug("Processing schedules...") if len(schedule_datetimes) == 0: botengine.get_logger(f"{'botengine'}").debug("No schedules to process") processing_schedules = False for schedule_id, schedule_dt in schedule_datetimes: botengine.get_logger(f"{'botengine'}").debug("Processing schedule {} - {}".format(schedule_id, schedule_dt)) # Process this datetime schedule if we have not processed it before if schedule_dt not in executed_datetimes: botengine.get_logger(f"{'botengine'}").debug("\tNew schedule to process") # Trigger this schedule timestamp_schedule = int( schedule_dt.timestamp() * 1000 ) # Run multiple timers that may trigger before the schedule 
botengine.get_logger(f"{'botengine'}").debug("\tProcessing timers...") while playback_timer_timestamp is not None: if ( playback_timer_timestamp < timestamp_schedule ): # Run but with inputs that reflect a timer timer_inputs = {} timer_inputs["locationId"] = location_id timer_inputs["time"] = ( playback_timer_timestamp ) timer_inputs["trigger"] = 64 timer_inputs["access"] = [] for access_id in raw_access_content: timer_inputs["access"].append( raw_access_content[access_id] ) playback_timestamp_ms = ( playback_timer_timestamp ) playback_timer_timestamp = None _bot_loggers["botengine"].info( Color.RED + "Executing timer {}; right now is {}; waiting for schedule '{}' - {}".format( playback_timestamp_ms, timestamp, schedule_id, timestamp_schedule, ) + Color.END ) _run( bot, {"inputs": [timer_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables # Execute any pending actions if len(playback_chat_completions) > 0: _bot_loggers["botengine"].info( "Injecting chat completions: playback_chat_completions={}".format( playback_chat_completions ) ) for ( key, data, openai_organization_id, ) in playback_chat_completions: # key = chat_completion["key"] # params = chat_completion["params"] datastream_inputs = { "dataStream": { "address": "openai", "feed": { "key": key, "id": "chatcmpl-86GKbsl3bP5YmmxutIY8aS5c5o5gK", "object": "chat.completion", "created": 1696503437, "model": "gpt-3.5-turbo-0613", "choices": [ { "index": 0, "message": { "role": "assistant", "content": "MOCKED RESPONSE", }, "finish_reason": "stop", } ], "usage": { "prompt_tokens": 13, "completion_tokens": 5, "total_tokens": 18, }, }, }, "trigger": 256, "locationId": location_id, "time": timestamp, "access": [], } for ( access_id ) in raw_access_content: content = dict( raw_access_content[ access_id ] ) content["trigger"] = False datastream_inputs[ "access" ].append(content) _run( bot, { "inputs": [ datastream_inputs ] }, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_chat_completions = [] else: break playback_timestamp_ms = timestamp_schedule # Check for other schedules that may be triggered at the same time schedule_ids = [schedule_id] for ( schedule_id2, schedule_dt2, ) in schedule_datetimes: if schedule_id2 == schedule_id: continue if schedule_dt2 == schedule_dt: schedule_ids.append(schedule_id2) botengine.get_logger(f"{'botengine'}").debug("\tRunning schedules: {}".format(schedule_ids)) # Run the schedule schedule_inputs = { "scheduleIds": schedule_ids, "trigger": 1, "locationId": location_id, "time": timestamp_schedule, "access": [], } for access_id in raw_access_content: content = dict( raw_access_content[access_id] ) content["trigger"] = False schedule_inputs["access"].append(content) _run( bot, {"inputs": [schedule_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables previous_timestamp_ms = timestamp_schedule botengine.get_logger(f"{'botengine'}").debug("\tUpdating executed schedules {}".format(schedule_dt)) executed_datetimes.append(schedule_dt) # Close out if this is our last schedule if (schedule_id, schedule_dt) == schedule_datetimes[ -1 ]: botengine.get_logger(f"{'botengine'}").debug("Last schedule processed") processing_schedules = False # Run multiple timers that may trigger before each other trigger, but after any predetermined schedules while playback_timer_timestamp is not None: if playback_timer_timestamp < 
timestamp: # Run but with inputs that reflect a timer timer_inputs = {} timer_inputs["locationId"] = location_id timer_inputs["time"] = playback_timer_timestamp timer_inputs["trigger"] = 64 timer_inputs["access"] = [] for access_id in raw_access_content: timer_inputs["access"].append( raw_access_content[access_id] ) playback_timestamp_ms = playback_timer_timestamp playback_timer_timestamp = None _bot_loggers["botengine"].info( Color.RED + "Executing timer {}; right now is {}; waiting for next trigger '{}'".format( playback_timestamp_ms, timestamp, trigger ) + Color.END ) _run( bot, {"inputs": [timer_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables # Execute any pending actions if len(playback_chat_completions) > 0: _bot_loggers["botengine"].info( "Injecting chat completions: playback_chat_completions={}".format( playback_chat_completions ) ) for ( key, data, openai_organization_id, ) in playback_chat_completions: # key = chat_completion["key"] # params = chat_completion["params"] datastream_inputs = { "dataStream": { "address": "openai", "feed": { "key": key, "id": "chatcmpl-86GKbsl3bP5YmmxutIY8aS5c5o5gK", "object": "chat.completion", "created": 1696503437, "model": "gpt-3.5-turbo-0613", "choices": [ { "index": 0, "message": { "role": "assistant", "content": "MOCKED RESPONSE", }, "finish_reason": "stop", } ], "usage": { "prompt_tokens": 13, "completion_tokens": 5, "total_tokens": 18, }, }, }, "trigger": 256, "locationId": location_id, "time": timestamp, "access": [], } for access_id in raw_access_content: content = dict( raw_access_content[access_id] ) content["trigger"] = False datastream_inputs["access"].append(content) _run( bot, {"inputs": [datastream_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_chat_completions = [] else: break playback_timestamp_ms = timestamp if trigger not in last_trigger_data: last_trigger_data[trigger] = {} # Schedules if trigger == 1: schedule_inputs = { "trigger": 1, "locationId": location_id, "time": timestamp, "access": [], } if "schedule_id" in d: schedule_inputs["scheduleId"] = d["schedule_id"] if "schedule_ids" in d: schedule_inputs["scheduleIds"] = d["schedule_ids"] for access_id in raw_access_content: content = dict(raw_access_content[access_id]) content["trigger"] = False schedule_inputs["access"].append(content) _run( bot, {"inputs": [schedule_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables # Modes elif trigger == 2: continue # Ignore modes for now if "location" in raw_access_content: raw_access_content["location"]["location"][ "prevEvent" ] = raw_access_content["location"]["location"]["event"] raw_access_content["location"]["location"]["event"] = d[ "event" ] raw_access_content["location"]["trigger"] = True else: # Modes are not a runtime.json trigger continue # Alerts elif trigger == 4: if int(d["device_type"]) in device_type_triggers: raw_access_content[d["device_id"]] = { "category": 4, "control": False, "device": { "connected": True, "description": d["description"], "deviceId": d["device_id"], "deviceType": int(d["device_type"]), "goalId": int(d["behavior"]), "spaces": [ { "id": 0, "spaceType": int(space_type), "name": "SPACE", } for space_type in d.get("spaces", "").split( "," ) if len(space_type) > 0 ], "locationId": int(location_id), "measureDate": timestamp, "startDate": 0, "updateDate": timestamp, }, "read": 
True, "trigger": True, } # The alerts go into an 'alerts' object inside the inputs. if "[online]" in d: # Included device activities (online/offline) # When a device disconnects, it will send an alert like this: [{u'alertType': u'status', u'params': [{u'name': u'deviceStatus', u'value': u'2'}], u'deviceId': u'eb10e80a006f0d00'}] # When a device reconnects, it will send an alert like this: [{u'alertType': u'on', u'deviceId': u'eb10e80a006f0d00'}] for parameter in d: if parameter == "[online]": offline = d[parameter] == "0" or not bool(d[parameter]) raw_access_content[d["device_id"]]["device"]["connected"] = not offline if offline: inputs["alerts"] = [ { "alertType": "status", "params": [ { "name": "deviceStatus", "value": "2", } ], "deviceId": d["device_id"], } ] else: inputs["alerts"] = [ { "alertType": "on", "deviceId": d["device_id"], } ] else: # Alerts provided by a data request, type 9 alert = { "alertType": d["alert_type"], "deviceId": d["device_id"], } params = [] for parameter in d: if parameter in [ "trigger", "location_id", "device_type", "device_id", "description", "timestamp_ms", "timestamp_iso", "timestamp_excel", "behavior", "spaces", "alertType", ]: continue params.append( {"name": parameter, "value": d[parameter]} ) if len(params) > 0: alert["params"] = params inputs["alerts"] = [alert] else: # This device type is not a runtime.json trigger continue elif trigger == 8: if int(d["device_type"]) in device_type_triggers: raw_access_content[d["device_id"]] = { "category": 4, "control": False, "device": { "connected": True, "description": d["description"], "deviceId": d["device_id"], "deviceType": int(d["device_type"]), "locationId": int(location_id), "measureDate": timestamp, "goalId": int(d["behavior"]), "spaces": [ { "id": 0, "spaceType": int(space_type), "name": "SPACE", } for space_type in d.get("spaces", "").split( "," ) if len(space_type) > 0 ], # "proxyId": "LCGW-86c503a8005f1500", # "remoteAddrHash": "befa55256736f5c0e87b6ec1876b8e05b773bbd70346f9f2da419fc3d9374aeb", "startDate": 0, "updateDate": timestamp, }, "read": True, "trigger": True, } if d["device_id"] not in device_id_params: device_id_params[d["device_id"]] = {} inputs["measures"] = [] for parameter in d: if parameter in [ "trigger", "location_id", "device_type", "device_id", "description", "timestamp_ms", "timestamp_iso", "timestamp_excel", "behavior", "spaces", "alertType", ]: continue measure = { "deviceId": d["device_id"], "name": parameter, } if "updated_params" in d: measure["updated"] = parameter in d["updated_params"].split(",") else: if parameter in device_id_params[d["device_id"]]: measure["prevTime"] = device_id_params[ d["device_id"] ][parameter]["time"] measure["prevValue"] = device_id_params[ d["device_id"] ][parameter]["value"] measure["updated"] = ( d[parameter] != measure["prevValue"] ) # Handle measurements that are provided with identical values but different timestamps by checking if all the parameters in this trigger are identical to the last parameters trigger if not measure["updated"]: if int( d["device_type"] ) == 2000 and parameter in ["bedStatus"]: if ( last_trigger_data[trigger][ d["device_id"] ] is not None ): last_d = last_trigger_data[trigger][ d["device_id"] ] is_identical = True for param in last_d: if param in [ "trigger", "location_id", "device_type", "device_id", "description", "timestamp_ms", "timestamp_iso", "timestamp_excel", "behavior", "alertType", ]: continue if last_d[param] != d[param]: is_identical = False break if is_identical: measure["updated"] = True else: 
measure["updated"] = True measure["time"] = timestamp measure["value"] = d[parameter].replace( COMMA_DELIMITER_REPLACEMENT_CHARACTER, "," ) device_id_params[d["device_id"]][parameter] = ( measure ) inputs["measures"].append(measure) last_trigger_data[trigger][d["device_id"]] = d else: # This device type is not a runtime.json trigger continue # Data Streams elif trigger == 256: continue # Ignore data streams for now if d.get("feed") is None: botengine.get_logger(f"{'botengine'}").error( "Datastream missing feed! datastream={}".format(d) ) continue inputs["dataStream"] = { "address": d["address"], "feed": json.loads(d["feed"]), } # Messages elif trigger & BotEngine.TRIGGER_MESSAGES != 0: { "time": 1446537883000, "trigger": 4096, "source": 5, "locationId": 123, "messages": [ { "messageId": 123, "scheduleType": 0, "status": 1, "topicId": "general", "appInstanceId": 456, "contentKey": "Short content description", "creationTime": 1646537712000, "maxDeliveryTime": 1666537712000, "deliveryDayTime": 36000, "timeToLive": 3600, }, { "messageId": 124, "scheduleType": 0, "status": 2, "topicId": "general", "appInstanceId": 456, "contentKey": "Short content description", "creationTime": 1646537712000, "maxDeliveryTime": 1666537712000, "deliveryDayTime": 36000, "timeToLive": 3600, "deliveryTime": 1666037712000, }, { "messageId": 125, "scheduleType": 1, "status": 1, "topicId": "general", "appInstanceId": 457, "contentKey": "Short content description", "creationTime": 1646537712000, "maxDeliveryTime": 1666537712000, "timeToLive": 3600, "schedule": "0 0 10 ? * SUN", }, { "messageId": 126, "originalMessageId": 123, "scheduleType": 0, "status": 3, "topicId": "general", "userId": 789, "contentText": "Reply from a user", "lang": "en", "creationTime": 1646537712000, }, ], } # TODO: Implement messages playback for access_id in raw_access_content: inputs["access"].append(raw_access_content[access_id]) inputs["time"] = timestamp inputs["trigger"] = trigger inputs["locationId"] = location_id # Run timers that happen before the next execution if playback_timer_timestamp is not None: if playback_timer_timestamp < timestamp: # Run but with inputs that reflect a timer timer_inputs = {} timer_inputs["locationId"] = location_id timer_inputs["time"] = playback_timer_timestamp timer_inputs["trigger"] = 64 timer_inputs["access"] = [] for access_id in raw_access_content: timer_inputs["access"].append( raw_access_content[access_id] ) playback_timestamp_ms = playback_timer_timestamp playback_timer_timestamp = None # _bot_loggers["botengine"].info(Color.RED + "Executing timer {}; right now is {}".format(playback_timestamp_ms, timestamp) + Color.END) _run( bot, {"inputs": [timer_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables playback_timestamp_ms = timestamp # After playback for 48 hours simulate a data_request if available and not yet triggered if ( not playback_data_requests_triggered and playback_data_requests is not None ): hours_later = 1 # 48 if ( timestamp - original_timestamp_ms ) > 1000 * 60 * 60 * hours_later: botengine.get_logger(f"{'botengine'}").debug( "playback_data_requests={}".format( playback_data_requests ) ) for ( device_id, filepath, ) in playback_data_requests.items(): in_file = open( "{}/{}".format( playback_file_directory, filepath ), "rb", ) data = in_file.read() lines = [ line.decode("utf-8") .replace("\n", "") .replace("\r", "") .split(",") for line in data.splitlines() ] csv_headers = lines.pop(0) param_idx = 
csv_headers.index("paramName") # Retrieve all unique parameter names param_names = [] for line in lines: if line[param_idx] in param_names: continue param_names.append(line[param_idx]) request = { "type": botengine.DATA_REQUEST_TYPE_PARAMETERS, "deviceId": device_id, "startTime": original_timestamp_ms, "endTime": botengine.get_timestamp(), "paramNames": param_names, "key": "all", # "index": # "ordered": } botengine.get_logger(f"{'botengine'}").debug( "Injecting initial data_request={}".format( request ) ) botengine.data_requests.append(request) # Run on any pending data_requests if len(botengine.data_requests) > 0: botengine.get_logger(f"{'botengine'}").debug( "playback data_requests={}".format( botengine.data_requests ) ) if playback_data_requests is not None: all_request_data = [] for data_request in botengine.data_requests: if data_request.get("key") is None: # Missing reference key, skip this request continue request_data = { "type": botengine.DATA_REQUEST_TYPE_PARAMETERS, "key": data_request["key"], "deviceId": data_request["deviceId"], "startTime": data_request["startTime"], "endTime": data_request["endTime"], "paramNames": data_request.get("paramNames"), "index": data_request.get("index"), } data = None for ( device_id, filepath, ) in playback_data_requests.items(): if device_id != data_request["deviceId"]: continue request_data["url"] = "{}/{}".format( playback_file_directory, filepath ) in_file = open( "{}/{}".format( playback_file_directory, filepath ), "rb", ) data = in_file.read() if data is None: continue botengine.get_logger(f"{'botengine'}").debug( "playback data_request={}".format(request_data) ) # Include data constrained to the data request start and end time constrained_data = [] lines = [ line.decode("utf-8") .replace("\n", "") .replace("\r", "") .split(",") for line in data.splitlines() ] csv_headers = lines.pop(0) constrained_data.append(csv_headers) param_idx = csv_headers.index("measureTime") botengine.get_logger(f"{'botengine'}").debug( "playback data_request first line={}".format( lines[0] ) ) botengine.get_logger(f"{'botengine'}").debug( "playback data_request last line={}".format( lines[-1] ) ) [ constrained_data.append(line) for line in lines if int(line[param_idx]) >= data_request["startTime"] and int(line[param_idx]) <= data_request["endTime"] ] if len(constrained_data) > 1: botengine.get_logger(f"{'botengine'}").debug( "playback data_request first data={}".format( constrained_data[1] ) ) botengine.get_logger(f"{'botengine'}").debug( "playback data_request last data={}".format( constrained_data[-1] ) ) request_data["data"] = "\n".join( [ ",".join(line) for line in constrained_data ] ).encode("utf-8") request_data["dataLength"] = len( request_data["data"] ) all_request_data.append(request_data) if len(request_data) > 0: request_inputs = { "time": timestamp, "data": all_request_data, "trigger": botengine.TRIGGER_DATA_REQUEST, "locationId": location_id, "access": [], } botengine.get_logger(f"{'botengine'}").debug( "playback injecting data request response: request_inputs={}".format( request_inputs ) ) for access_id in raw_access_content: request_inputs["access"].append( raw_access_content[access_id] ) _run( bot, {"inputs": [request_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, playback=True, ) # Expect the trigger was delivered even if there was no data available botengine.data_requests = [] playback_data_requests_triggered = True # If this is the first trigger for this device then insert a Device Configuration trigger to start (trigger 128) 
if ( "device_id" in d and d["device_id"] not in triggered_device_ids ): triggered_device_ids.append(d["device_id"]) inputs_copy = dict(inputs) inputs_copy["trigger"] = 128 _run( bot, {"inputs": [inputs_copy]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) # Run against our real trigger _run( bot, {"inputs": [inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) playback_variables = botengine.variables # requests = None # with open(playback_json_file, 'r') as f: # ijson_requests_properties = ijson.items(f, 'requests') # for value in ijson_requests_properties: # requests = value # break # # if requests is not None: # inputs = {} # inputs['time'] = requests[0]['completionDateMs'] # inputs['data'] = requests # inputs['trigger'] = 1 << 11 # inputs['locationId'] = location_id # inputs['access'] = [] # for access_id in raw_access_content: # inputs['access'].append(raw_access_content[access_id]) # _run(bot, {"inputs": [inputs]}, _bot_loggers["botengine"], botengine_override=botengine, playback=True) # Conclude with a data stream message did_stop_playback() datastream_inputs = { "dataStream": {"address": "did_stop_playback", "feed": None}, "trigger": 256, "locationId": location_id, "time": timestamp, "access": [], } for access_id in raw_access_content: content = dict(raw_access_content[access_id]) content["trigger"] = False datastream_inputs["access"].append(content) _run( bot, {"inputs": [datastream_inputs]}, _bot_loggers["botengine"], botengine_override=botengine, local=True, playback=True, ) runtime_duration_ms = round(time.time() * 1000) - start_timestamp_ms virtual_duration_ms = latest_timestamp_ms - original_timestamp_ms runtime_duration_minutes = round(runtime_duration_ms / 1000 / 60, 1) virtual_duration_hours = round(virtual_duration_ms / 1000 / 60 / 60, 1) # Write playback states out to the file import copy output_states = copy.deepcopy(playback_states) with open(playback_states_log, "w") as myfile: if None in output_states: myfile.write(json.dumps(output_states[None], indent=2)) del output_states[None] with open(playback_timeseries_states_log, "w") as myfile: if len(output_states) > 0: myfile.write(json.dumps(output_states, indent=2)) # Move all artifacts to a new directory for this session import shutil playback_dir = f"playback_{botname}_{session_id}" os.makedirs(playback_dir) try: shutil.move( playback_states_log, f"{playback_dir}/playback_states_{session_id}.txt", ) except Exception as e: print(f"Error moving playback_states_log: {e}") pass try: shutil.move( playback_timeseries_states_log, f"{playback_dir}/playback_timeseries_states_{session_id}.txt", ) except Exception as e: print(f"Error moving playback_timeseries_states_log: {e}") pass try: shutil.move( playback_narratives_log, f"{playback_dir}/playback_narratives_{session_id}.txt", ) except Exception as e: print(f"Error moving playback_narratives_log: {e}") pass try: shutil.move( playback_notifications_log, f"{playback_dir}/playback_notifications_{session_id}.txt", ) except Exception as e: print(f"Error moving playback_notifications_log: {e}") pass try: shutil.move( playback_location_priorities_log, f"{playback_dir}/playback_location_priorities_{session_id}.txt", ) except Exception as e: print(f"Error moving playback_location_priorities_log: {e}") pass # Copy the playback zip or json file to the new directory if zipfile.is_zipfile(playback): shutil.copy(playback, f"{playback_dir}/playback.zip") else: shutil.copy(playback, 
f"{playback_dir}/playback.json") # Move the compiled bot to the new directory shutil.move(base_path, f"{playback_dir}/botname") # Split log files into smaller files lines_per_file = 250000 smallfile = None with open(f"playback_{session_id}_log.txt") as logfile: for lineno, line in enumerate(logfile): if lineno % lines_per_file == 0: if smallfile: smallfile.close() small_filename = f"{playback_dir}/playback_{session_id}_log_{lineno + lines_per_file}.txt" smallfile = open(small_filename, "w") smallfile.write(line) if smallfile: smallfile.close() # Remove the original log file try: os.remove(f"playback_{session_id}_log.txt") except Exception as e: print(f"Error removing playback_{session_id}_log.txt: {e}") pass # Set playback timestamp to now from datetime import datetime playback_timestamp_ms = datetime.now().timestamp() * 1000 playback_timezone = "US/Pacific" _bot_loggers["botengine"].error( "Fast-forwarded {} hours of playback into only {} minutes - {}% time savings!".format( virtual_duration_hours, runtime_duration_minutes, round((1 - (runtime_duration_ms / virtual_duration_ms)) * 100, 2), ) ) _bot_loggers["botengine"].error( "Exported runtime history to {}".format(playback_dir) ) _bot_loggers["botengine"].error(the_bot() + " Done!") if force_save_states or save_states: # Refresh the API key again in case the bot playback takes too long if admin_username is not None: admin_key = _login( server, admin_username, admin_password, admin=True ) if username is not None and password is not None: user_key = _login(server, username, password) try: _delete_states(server, admin_key, commit_location_id) except Exception as e: print(f"Error deleting states: {e}") pass for timestamp, value in playback_states.items(): # Time series status should not be added prior to 2020-01-01 min_timestamp = 1577854800000 if timestamp is not None and timestamp <= min_timestamp: _bot_loggers["botengine"].info( "Cannot upload timeseries state, date too early." 
) continue for address, json_content in value.items(): if timestamp is not None: _bot_loggers["botengine"].info( "Uploading '{}' at timestamp {} to location ID {}...".format( address, timestamp, commit_location_id ) ) else: _bot_loggers["botengine"].info( "Uploading '{}' to location ID {}...".format( address, commit_location_id ) ) _bot_loggers["botengine"].debug( json.dumps(value, indent=2, sort_keys=True) ) try: _set_state( server, admin_key, commit_location_id, address, json_content, overwrite=True, timestamp_ms=timestamp, publish_to_partner=False, ) except BotError as e: # Expired API Key if e.code == 2: # Refresh the API key again in case the bot playback takes too long if admin_username is not None: admin_key = _login( server, admin_username, admin_password, admin=True ) if username is not None and password is not None: user_key = _login(server, username, password) _set_state( server, admin_key, commit_location_id, address, json_content, overwrite=True, timestamp_ms=timestamp, publish_to_partner=False, ) else: raise e time.sleep(0.5) if save_priorities: # Refresh the API key again in case the bot playback takes too long if username is not None and password is not None: user_key = _login(server, username, password) analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) if playback_location_priority is not None: _bot_loggers["botengine"].info( "Setting location priority to {}...".format( playback_location_priority ) ) _set_location_priority( server, analytic_key, commit_location_id, playback_location_priority, ) time.sleep(0.5) if save_narratives: # !! Use with caution. Oftentimes bots produce LOTS of narratives. This will save them all. # As a safeguard, analytic narratives are not published. # Refresh the API key again in case the bot playback takes too long if username is not None and password is not None: user_key = _login(server, username, password) analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) if len(playback_narratives) > 0: _bot_loggers["botengine"].info("Creating or updating narratives...") first_narrative, first_params = playback_narratives[0] last_narrative, last_params = playback_narratives[-1] _bot_loggers["botengine"].info( "Retrieving existing narratives between {} and {}...".format( first_narrative["narrativeTime"], last_narrative["narrativeTime"], ) ) narratives_response = _get_narratives( server, user_key, commit_location_id, row_count=100, start_date_ms=first_narrative["narrativeTime"], end_date_ms=last_narrative["narrativeTime"] + 1000, ) current_narratives = narratives_response.get("narratives", []) while narratives_response.get("nextMarker", None) is not None: _bot_loggers["botengine"].info("Retrieving more narratives...") narratives_response = _get_narratives( server, user_key, commit_location_id, start_date_ms=first_narrative["narrativeTime"], end_date_ms=last_narrative["narrativeTime"] + 1000, page_marker=narratives_response["nextMarker"], ) current_narratives.extend(narratives_response["narratives"]) _bot_loggers["botengine"].info( "Current narratives: {}".format(len(current_narratives)) ) for narrative, params in playback_narratives: event_type = narrative.get("eventType") narrative_time = narrative["narrativeTime"] _bot_loggers["botengine"].info( "Checking narrative '{}' at timestamp {} for location ID {}...".format( event_type, narrative_time, commit_location_id ) ) update_narrative_id = params.get("narrativeId") update_narrative_time = params.get("narrativeTime")
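# The matching pass below pairs each recorded playback narrative with any existing
# server narratives that share the same narrativeTime and (50-character truncated)
# eventType, scores every candidate by how many fields differ, then skips the upload
# when an identical match exists or updates the closest match otherwise. A minimal
# sketch of the scoring idea, using an illustrative name (_diff_score is not part
# of this file):
#
#     def _diff_score(current, new):
#         fields = ["title", "priority", "description", "icon", "iconFont"]
#         score = sum(1 for f in fields if current.get(f) != new.get(f))
#         # 'target' dicts are compared with None values stripped first
#         strip = lambda t: {k: v for k, v in (t or {}).items() if v is not None}
#         return score + int(strip(current.get("target")) != strip(new.get("target")))
#
# A score of 0 means the server already holds this exact narrative.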
if ( update_narrative_id is None and update_narrative_time is None ): should_skip = False # Multiple narratives of the same time and date may be published # Find each, then check if any of them match the pending narrative, if so, ignore it matched = [] # [(id, should_update, idx)] for idx, current_narrative in enumerate(current_narratives): if current_narrative[ "narrativeDateMs" ] == narrative_time and current_narrative.get( "eventType" ) == ( event_type[:50] if event_type is not None else None ): _bot_loggers["botengine"].info( "Found existing narrative ID {}".format( current_narrative["id"] ) ) # Check that this narrative is different # Remove any None values from the `target` before comparing should_update = 0 # Represents the number of fields that are different if current_narrative.get("title") != narrative.get( "title" ): _bot_loggers["botengine"].info("Title differs") should_update += 1 if current_narrative.get( "priority" ) != narrative.get("priority"): _bot_loggers["botengine"].info( "Priority differs" ) should_update += 1 if current_narrative.get( "description" ) != narrative.get("description"): _bot_loggers["botengine"].info( "Description differs" ) should_update += 1 if current_narrative.get("icon") != narrative.get( "icon" ): _bot_loggers["botengine"].info("Icon differs") should_update += 1 if current_narrative.get( "iconFont" ) != narrative.get("iconFont"): _bot_loggers["botengine"].info( "Icon font differs" ) should_update += 1 if { k: v for k, v in current_narrative.get( "target", {} ).items() if v is not None } != { k: v for k, v in narrative.get("target", {}).items() if v is not None }: _bot_loggers["botengine"].info("Target differs") should_update += 1 if should_update > 0: _bot_loggers["botengine"].info( "Narrative differs for {} fields".format( should_update ) ) _bot_loggers["botengine"].info( "Current narrative: {}".format( current_narrative ) ) _bot_loggers["botengine"].info( "New narrative: {}".format(narrative) ) matched.append( (current_narrative["id"], should_update, idx) ) if len(matched) > 0: sorted_matches = sorted(matched, key=lambda x: x[1]) _bot_loggers["botengine"].info( "Sorted matches: {}".format(sorted_matches) ) if sorted_matches[0][1] == 0: _bot_loggers["botengine"].info( "Skipping identical narrative" ) should_skip = True else: # Update the narrative with the most matches update_narrative_id = sorted_matches[0][0] update_narrative_time = narrative_time current_narratives.pop(sorted_matches[0][2]) else: _bot_loggers["botengine"].info( "Did not find existing narrative: {}".format( narrative ) ) if should_skip: continue scope = params.get("scope", 1) publish = params.get("publish") _bot_loggers["botengine"].info( "Creating or updating narrative (id: {}, time: {}): {}".format( update_narrative_id, update_narrative_time, narrative ) ) try: _create_or_update_narrative( server, analytic_key, commit_location_id, scope, publish, update_narrative_id, update_narrative_time, narrative, ) except BotError as e: # Expired API Key if e.code == 2: # Refresh the API key again in case the bot playback takes too long if username is not None and password is not None: user_key = _login(server, username, password) analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) _create_or_update_narrative( server, analytic_key, commit_location_id, scope, publish, update_narrative_id, update_narrative_time, narrative, ) elif e.code == 6: # Narrative could not be updated, skip it! 
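# BotError codes handled while committing playback results (as used in this file):
# code 2 means the API key expired, so we log in again and retry the same call
# once; code 6 means the server refused to update this narrative. A compact sketch
# of the retry-once pattern, using an illustrative helper name
# (_retry_on_expired_key is not part of this file):
#
#     def _retry_on_expired_key(call, relogin):
#         try:
#             return call()
#         except BotError as e:
#             if e.code != 2:
#                 raise
#             relogin()  # refresh user_key / analytic_key
#             return call()
#
# Skip the narrative that could not be updated: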
continue else: raise e time.sleep(0.5) # break _bot_loggers["botengine"].info( "Finished creating or updating narratives" ) _bot_loggers["botengine"].info( "Removing unmatched narratives: {}".format( len(current_narratives) ) ) for current_narrative in current_narratives: _bot_loggers["botengine"].info( "Deleting unmatched narrative ID {}".format( current_narrative["id"] ) ) try: _delete_narrative( server, analytic_key, commit_location_id, 1, current_narrative["id"], current_narrative["narrativeDateMs"], ) except BotError as e: # Expired API Key if e.code == 2: # Refresh the API key again in case the bot playback takes too long if username is not None and password is not None: user_key = _login(server, username, password) analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, commit_bot_instance_id ) _delete_narrative( server, analytic_key, commit_location_id, 1, current_narrative["id"], current_narrative["narrativeDateMs"], ) else: raise e time.sleep(0.5) # break _bot_loggers["botengine"].info( "Finished deleting unmatched narratives" ) sys.exit(0) return 0 if generate is not None: # Generate the bot import shutil bundle = generate.replace("/", "") base_path = os.path.join(os.getcwd(), ".precommit_" + bundle) _merge_redirects( os.path.join(os.getcwd(), generate), base_path, bundle, server, args.core_directory, ) # If we're running on AWS Lambda, then the developer's folder goes into the base temporary directory # If we're running on a Docker server, then the developer's folder goes into the /content directory temporary_bot_directory = os.path.join(os.getcwd(), "." + bundle) bot_subdirectory = "" # Remove the last commit if os.path.isdir(temporary_bot_directory): shutil.rmtree(temporary_bot_directory, ignore_errors=True) # Ignore these files ignore_list = [".botignore", ".DS_Store", "icon.png", ".redirect"] botignore_file = base_path + os.sep + ".botignore" if os.path.isfile(botignore_file): with open(botignore_file) as f: for line in f: if not line.startswith("#") and line.strip() != "": ignore_list.append(line.strip()) print( "Ignoring files (add more in your .botignore file): \n" + str(ignore_list) ) # Copy the developer's bot directory into our temporary directory shutil.copytree( base_path, temporary_bot_directory, ignore=shutil.ignore_patterns(*ignore_list), ) # Add the best botengine representation into the temporary directory _merge_botengine(temporary_bot_directory) # AWS Lambda requires us to also install 3rd party python packages - this is informational pip_install = [] pip_install = list( set(pip_install) | set( _extract_packages(temporary_bot_directory + bot_subdirectory, True) ) ) pip_install_remotely = prepare_dependencies( _extract_packages(temporary_bot_directory + bot_subdirectory, False) ) print("Locally installed packages: " + str(pip_install)) print("Remotely installed packages: " + str(pip_install_remotely)) # Remove the staging directory if os.path.isdir(base_path): shutil.rmtree(base_path, ignore_errors=True) print( Color.BOLD + "Generated bot: {}".format(temporary_bot_directory) + Color.END ) print("\n" + the_bot() + "Done!") return 0 if args.actions == "engage_kit": if not _is_server_version_newer_than(server, 41, 0): _bot_loggers["botengine"].warning( "This feature is not available on this cloud!" ) return 0 if user_key is None: user_key = _login(server, username, password) # Message Topics if args.update_message_topics: if args.bundle_id is None: sys.stderr.write(Color.RED + "Missing the bundle ID." 
+ Color.END) sys.stderr.write("\n\n") return 1 # Update the topics of a given bot bundle if user_key is None: user_key = _login(server, username, password) ### Commit your bot to the cloud ### import shutil bundle_id = args.bundle_id.replace("/", "") if len(bundle_id.split(".")) != 3: sys.stderr.write( Color.RED + "Your new bot name must conform to reverse domain-name notation, as in 'com.yourname.BotName'" + Color.END ) sys.stderr.write("\n\n") return 1 base_path = os.path.join(os.getcwd(), bundle_id) if user_key is None: user_key = _login(server, username, password) topics_file = os.path.join(base_path, TOPICS_FILENAME) topics_text = "" with open(topics_file) as f: for line in f: line = line.strip() if not line.startswith("#"): topics_text += line topics = json.loads(topics_text) try: _update_message_topics(server, user_key, bundle_id, topics) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 print("\n" + Color.BOLD + "Updated!" + Color.END + "") return 0 if args.get_message_topics: if args.bundle_id: analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, instance ) app_info = _botstore_botinfo( server, user_key, args.bundle_id.replace("/", "") ) app_id = app_info.get("appId") if app_id is None: sys.stderr.write( Color.RED + "The bundle ID is not valid." + Color.END ) sys.stderr.write("\n\n") return 1 topics = _get_message_topics( server, analytic_key=analytic_key, app_id=app_id, lang=args.language or "en", ) else: topics = _get_message_topics( server, key=user_key, lang=args.language or "en" ) if "topics" not in topics: print( Color.RED + "There are no topics on this server.\n\n" + Color.END ) else: print(Color.BOLD + "Topics" + Color.END) print("-" * 50) for topic in topics["topics"]: print( "\t{}{}{}: {}".format( Color.BOLD, str(topic["topicId"]), Color.END, topic["name"], ) ) print() return 0 # Messages if args.create_messages: if args.messages_json is None: sys.stderr.write( Color.RED + "Missing the messages json content.\n" + Color.END ) sys.stderr.write( Color.RED + "To create messages: --create_messages -j " + Color.END ) sys.stderr.write("\n\n") return 1 try: messages = json.loads(args.messages_json) except Exception: sys.stderr.write( Color.RED + "Invalid messages json content." + Color.END ) sys.stderr.write("\n\n") return 1 if instance is not None: analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, instance ) messages = _create_messages( server, key=user_key, analytic_key=analytic_key, messages_json=messages, ) else: if location_id is None: sys.stderr.write( Color.RED + "Missing the location ID." + Color.END ) sys.stderr.write("\n\n") return 1 messages = _create_messages( server, key=user_key, location_id=location_id, messages_json=messages, ) print(messages) return 0 if args.update_messages: if args.messages_json is None: sys.stderr.write( Color.RED + "Missing the messages json content.\n" + Color.END ) sys.stderr.write( Color.RED + "To update messages: --update_messages -j " + Color.END ) sys.stderr.write("\n\n") return 1 try: messages = json.loads(args.messages_json) except Exception: sys.stderr.write( Color.RED + "Invalid messages json content." + Color.END ) sys.stderr.write("\n\n") return 1 if instance is None: sys.stderr.write( Color.RED + "Missing the bot instance ID." 
+ Color.END ) sys.stderr.write("\n\n") return 1 analytic_key = _get_botengine_key( server, user_key, BOT_KEY_TYPE_NORMAL, instance ) _update_messages(server, key=analytic_key, messages_json=messages) print() return 0 if args.get_messages: if location_id is None: sys.stderr.write(Color.RED + "Missing the location ID." + Color.END) sys.stderr.write("\n\n") return 1 if args.messages_start_date is None: sys.stderr.write(Color.RED + "Missing the start date." + Color.END) sys.stderr.write( Color.RED + "To get messages: --get_messages -s " + Color.END ) sys.stderr.write("\n\n") return 1 topic_id = args.topic_id end_date = args.messages_end_date read_status = args.messages_read_status messages = _get_messages( server, user_key, location_id, args.messages_start_date, end_date, instance, topic_id, read_status, ) if "messages" not in messages: print(Color.RED + "No delivered messages found.\n\n" + Color.END) else: print(Color.BOLD + "Messages" + Color.END) print("-" * 50) for message in messages["messages"]: print( "\t{}{}{}{}: {} - {} - {}".format( Color.BOLD, f"{'🤖' if 'appInstanceId' in message else '👱'} {message['messageId']}\t", f"> {message['originalMessageId']}\t" if "originalMessageId" in message else "\t", Color.END, message["deliveryDate"], " Read" if message["readStatus"] else "Unread", message.get("contentKey", "") ) ) print() return 0 if args.update_message_read_status: if location_id is None: sys.stderr.write(Color.RED + "Missing the location ID." + Color.END) sys.stderr.write("\n\n") return 1 if args.message_id is None: sys.stderr.write(Color.RED + "Missing the message ID." + Color.END) sys.stderr.write( Color.RED + "To update message read status: --update_message_read_status -m -r " + Color.END ) sys.stderr.write("\n\n") return 1 if args.messages_read_status is None: sys.stderr.write( Color.RED + "Missing the new read status." 
+ Color.END ) sys.stderr.write( Color.RED + "To update message read status: --update_message_read_status -m -r " + Color.END ) sys.stderr.write("\n\n") return 1 _update_message_read_status( server, user_key, location_id, args.message_id, args.messages_read_status, ) print() return 0 if args.actions == "bot_organizations": if args.add_bot_organization: try: if args.organization_id is None: raise BotError("Missing the organization ID.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return 0 if user_key is None: user_key = _login(server, username, password) _add_bot_to_organization( server, user_key, args.add_bot_organization, args.organization_id ) _bot_loggers["botengine"].info( "Bot {} added to organization {}.".format( args.add_bot_organization, args.organization_id ) ) if args.approve_bot_organization: try: if args.organization_id is None: raise BotError("Missing the organization ID.", -1) if args.bot_organization_status is None: raise BotError("Missing the status.", -1) if args.bot_organization_status not in ["1", "2"]: raise BotError("Invalid status.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return 0 if user_key is None: user_key = _login( server, admin_username, admin_password, admin=True ) _approve_bot_for_organization( server, user_key, args.approve_bot_organization, args.organization_id, args.bot_organization_status, args.bot_organization_development, ) _bot_loggers["botengine"].info( "Bot {} status set to {} in organization {}.".format( args.approve_bot_organization, args.bot_organization_status, args.organization_id, ) ) if args.remove_bot_organization: try: if args.organization_id is None: raise BotError("Missing the organization ID.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return 0 if user_key is None: user_key = _login(server, username, password) _remove_an_organization( server, user_key, args.remove_bot_organization, args.organization_id ) _bot_loggers["botengine"].info( "Bot {} removed from organization {}.".format( args.remove_bot_organization, args.organization_id ) ) if args.get_bot_organizations: if user_key is None: user_key = _login(server, username, password) bot_organizations = _get_organizations( server, user_key, args.get_bot_organizations ) organizations = bot_organizations.get("organizations", []) if len(organizations) == 0: print("No organizations found.") for organization in organizations: if organization.get("development", True): development = Color.YELLOW + "Development" + Color.END else: development = Color.GREEN + "Public" + Color.END print( f"{Color.BOLD}{organization['name']}{Color.END} - {organization['id']} ({development})" ) print("\n") return 0 if args.actions == "bot_shop_search": if user_key is None: if organization_id is not None: user_key = _login( server, admin_username, admin_password, admin=True ) else: user_key = _login(server, username, password) search = _botstore_search( server, user_key, args.bot_shop_search_search_by, args.bot_shop_search_categories, args.bot_shop_search_compatible, args.bot_shop_search_lang, args.bot_shop_search_core, location_id, organization_id, args.bot_shop_search_object_names, args.bot_shop_search_limit, ) apps = search.get("apps", []) if len(apps) == 0: print("No bots found.") for bot in apps: if bot.get("compatible", True): compatibility = Color.GREEN + "(Compatible)" + Color.END else: compatibility = Color.RED + "(Incompatible)" + Color.END print( Color.BOLD + "+ " + bot["name"] + " - by " + bot["author"] + " " + compatibility + Color.END ) print("\t" + 
Color.UNDERLINE + bot["bundle"] + Color.END) try: print("\t" + bot["description"].replace("\n", "\n\t\t")) except Exception as e: print(f"\t{Color.RED}Error: {e}{Color.END}") pass print("\n") return 0 if args.actions == "execution": if args.execution_history: try: if args.bundle is None and args.app_instance_id is None: raise BotError( "Missing the bot bundle ID or the app instance ID.", -1 ) if args.start_date_ms is None: raise BotError("Missing the start date.", -1) if args.end_date_ms is None: raise BotError("Missing the end date.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return if user_key is None: user_key = _login(server, username, password) bundle = args.bundle if args.app_instance_id is not None: bundle = None execution_history = _botstore_execution_history( server, user_key, bundle=bundle, developer=args.developer, app_instance_id=args.app_instance_id, flow=args.execution_flow, trigger=args.trigger, errors_only=args.errors_only, start_date=args.start_date_ms, end_date=args.end_date_ms, row_count=args.row_count, sort_order=args.sort_order, ) print(json.dumps(execution_history, indent=2, sort_keys=True)) return if args.actions == "bot_instance_logging": try: if args.app_instance_id is None: raise BotError("Missing the app instance ID.", -1) if args.execution_flow is None: raise BotError("Missing the execution flow.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return if user_key is None: user_key = _login(server, username, password) if args.set_cloudwatch_logging: set_cloudwatch_logging( server, user_key, args.app_instance_id, args.execution_flow, args.logging_status, args.logging_end_date_ms, ) _bot_loggers["botengine"].info( "Logging status set to {} for bot instance {}.".format( args.logging_status, args.app_instance_id ) ) if args.logging_status == "1": _bot_loggers["botengine"].info( "Logging will finish on {}".format( BotEngine._strftimestamp( int(args.logging_end_date_ms) // 1000 ) ) ) if args.get_cloudwatch_log_info: try: info = get_cloudwatch_log_info( server, user_key, args.app_instance_id, args.execution_flow ) _bot_loggers["botengine"].info( "Logging status is {}.".format(info["status"]) ) if info["status"] == 1: _bot_loggers["botengine"].info( "Your bot instance {} is logging to AWS CloudWatch Log Group {} and Log Stream {}.".format( args.app_instance_id, info["groupName"], info["streamName"], ) ) _bot_loggers["botengine"].info( "Logging will finish on {}".format(info["endDate"]) ) if "lastEventDate" in info: _bot_loggers["botengine"].info( "The most recent log event in the log stream was on {}".format( info["lastEventDate"] ) ) else: _bot_loggers["botengine"].info( "Your bot instance {} is not logging to AWS CloudWatch.".format( args.app_instance_id ) ) except BotError as e: if e.code == 6: _bot_loggers["botengine"].info( "Your bot instance {} is not logging to AWS CloudWatch.".format( args.app_instance_id ) ) else: _bot_loggers["botengine"].error(e) return if args.actions == "bot_instance_log_export": if args.create_an_export_task: try: if args.app_instance_id is None: raise BotError("Missing the app instance ID.", -1) if args.execution_flow is None: raise BotError("Missing the execution flow.", -1) if args.start_date_ms is None: raise BotError("Missing the start date.", -1) except Exception as e: _bot_loggers["botengine"].error(e) return if user_key is None: user_key = _login(server, username, password) try: log_export = create_an_export_task( server, user_key, args.app_instance_id, args.execution_flow, args.start_date_ms,
args.end_date_ms, ) _bot_loggers["botengine"].info( "Export task created. Check the status and download your logs using: `bot_instance_log_export --get_export_status --task_id {}'".format( log_export["taskId"] ) ) except BotError as e: _bot_loggers["botengine"].error(e) return if args.get_export_status: try: if args.task_id is None: raise BotError("Missing the task ID.", -1) except Exception as e: _bot_loggers["botengine"].error(e) if user_key is None: user_key = _login(server, username, password) try: log_export = get_export_status(server, user_key, args.task_id) _bot_loggers["botengine"].debug( "log_export: {}".format( json.dumps(log_export, indent=2, sort_keys=True) ) ) _bot_loggers["botengine"].info( "Export task for bot instance '{}' is {}.".format( log_export["appInstanceId"], log_export["statusCode"] ) ) if log_export["statusCode"] == "CANCELLED": return if log_export["statusCode"] == "COMPLETED": _bot_loggers["botengine"].info("The task is completed.") if "files" not in log_export: _bot_loggers["botengine"].info( "No files were exported. Please try changing your export parameters." ) return should_continue = input("Download the exported files? (Y/N):") if should_continue not in ["Y", "y"]: _bot_loggers["botengine"].info( "You can download these logs here: {}".format( log_export["files"] ) ) return pass if log_export["statusCode"] == "FAILED": _bot_loggers["botengine"].info( "The task failed. Please try again or contact support." ) return if log_export["statusCode"] == "PENDING": _bot_loggers["botengine"].info( "The task is pending. Please check back later." ) return if log_export["statusCode"] == "PENDING_CANCEL": _bot_loggers["botengine"].info( "The task is pending cancellation." ) return if log_export["statusCode"] == "RUNNING": _bot_loggers["botengine"].info( "The task is running. The exported files will be available soon." 
) return _bot_loggers["botengine"].info("Download the exported files") log_export_path = os.path.join( os.getcwd(), "log_exports", str(log_export["appInstanceId"]) ) if not os.path.exists("log_exports"): os.makedirs("log_exports") if not os.path.exists(log_export_path): os.makedirs(log_export_path) # Download and extract the log files log_files = [] for idx in range(len(log_export["files"])): file = log_export["files"][idx] _bot_loggers["botengine"].info("Downloading: {}".format(file)) import gzip import shutil import requests with requests.get(file, stream=True) as r: with open( os.path.join(log_export_path, f"{idx}.gz"), "wb" ) as f: shutil.copyfileobj(r.raw, f) _bot_loggers["botengine"].info( "Downloaded: {}".format( os.path.join(log_export_path, f"{idx}.gz") ) ) with gzip.open( os.path.join(log_export_path, f"{idx}.gz"), "rb" ) as f: file_content = f.read() extracted_path = os.path.join(log_export_path, f"{idx}.log") _bot_loggers["botengine"].debug( "Extracted: {}".format(extracted_path) ) with open(extracted_path, "wb") as in_file: in_file.write(file_content) log_files.append(extracted_path) os.remove(os.path.join(log_export_path, f"{idx}.gz")) # Sort the log files by timestamp _bot_loggers["botengine"].info("Sorting logs...") chunks = {} # {RequestId: [log_lines]} current_chunk = None all_request_ids = [] # Track for reference later # Loop through all log files and compile of list of each chunk by RequestId # RequestIds may be split where either 2 BEGIN statements are found or 2 END statements are found in a row # We assume that chunks are not split across log files for log_filename in log_files: _bot_loggers["botengine"].debug(f"Reading {log_filename}") for line in open( os.path.join(log_export_path, log_filename), "r" ): if "LOG RequestId:" in line: if "BEGIN" in line: request_id = line.split(" ")[-1] _bot_loggers["botengine"].debug( f"Begin of chunk: {request_id}" ) all_request_ids.append(request_id) if current_chunk is None: if request_id in chunks: _bot_loggers["botengine"].debug( f"\tOrphaned chunk resumed: {request_id}" ) current_chunk = chunks[request_id] current_chunk.append(line) else: _bot_loggers["botengine"].debug( f"\tFound new chunk: {request_id}" ) current_chunk = [line] else: current_chunk_request_id = current_chunk[ 0 ].split(" ")[-1] _bot_loggers["botengine"].debug( f"\tOrphaned chunk paused: {current_chunk_request_id}" ) _bot_loggers["botengine"].debug( f"\t\tChunk: {current_chunk}" ) chunks[current_chunk_request_id] = current_chunk current_chunk = [line] if "END" in line: request_id = line.split(" ")[-1] _bot_loggers["botengine"].debug( f"End of chunk: {request_id}" ) if current_chunk is not None: current_chunk.append(line) if "BEGIN" in current_chunk[0]: _bot_loggers["botengine"].debug( f"\tComplete chunk: {request_id}" ) chunks[request_id] = current_chunk elif request_id in chunks: _bot_loggers["botengine"].debug( f"\tOrphaned chunk resumed: {request_id}" ) _bot_loggers["botengine"].debug( f"\t\tChunk: {current_chunk}" ) chunks[request_id].extend(current_chunk) else: _bot_loggers["botengine"].debug( f"\tOrphaned chunk paused: {request_id}" ) chunks[request_id] = current_chunk _bot_loggers["botengine"].debug( f"\tFinished chunk: {current_chunk}" ) current_chunk = None else: if current_chunk is not None: _bot_loggers["botengine"].debug( f"Append line: {line}" ) current_chunk.append(line) else: _bot_loggers["botengine"].debug( f"Orphaned chunk start: {line}" ) current_chunk = [line] # Reorder orphaned chunks for idx, request_id in enumerate(chunks): chunk = 
chunks[request_id] if "BEGIN" not in chunk[0]: _bot_loggers["botengine"].debug( f"Reordering orphaned chunk: {idx}" ) # Get the index of 'BEGIN' ordered_chunk = [] for i, line in enumerate(chunk): if "BEGIN" in line: # Add the chunk in order ordered_chunk = chunk[i:] ordered_chunk.extend(chunk[:i]) break chunks[request_id] = ordered_chunk _bot_loggers["botengine"].debug( f"Finished reading {log_filename}" ) _bot_loggers["botengine"].debug( f"{len(all_request_ids)} request IDs found" ) _bot_loggers["botengine"].debug(f"{len(chunks)} chunks found") # Analysis for mismatched count of request IDs and chunks if len(chunks) != len(all_request_ids): _bot_loggers["botengine"].debug("Analyzing mismatched chunks") for request_id in all_request_ids: if request_id not in chunks: _bot_loggers["botengine"].debug( f"\tRequest id not found: {request_id}" ) for chunk_request_id in chunks: if chunk_request_id not in all_request_ids: _bot_loggers["botengine"].debug( f"\tChunk not found: {chunk_request_id}" ) sorted_keys = sorted(chunks.keys()) try: _bot_loggers["botengine"].debug( f"" f"From: {chunks[sorted_keys[0]][0]}" f"To: {chunks[sorted_keys[-1]][0]}" ) except Exception as e: _bot_loggers["botengine"].debug( f"Error: {e}" ) # Split logs into chunked files lines_per_file = 250000 total_lines = 0 _bot_loggers["botengine"].debug( f"Total lines: {sum([len(chunks[key]) for key in sorted_keys])}" ) for key in sorted_keys: out_filename = os.path.join( log_export_path, f"exported_{total_lines // lines_per_file}.log", ) _bot_loggers["botengine"].debug( f"Writing {len(chunks[key])} lines to {out_filename}" ) with open(out_filename, "a") as f: f.write("".join(chunks[key])) total_lines += len(chunks[key]) _bot_loggers["botengine"].debug( f"Total lines written: {total_lines}" ) _bot_loggers["botengine"].info( "Exported {} log files to {}".format( len(log_files), log_export_path ) ) except BotError as e: _bot_loggers["botengine"].error( f"Could not get the export status: {e}" ) return if not botname and not instance: sys.stderr.write("No bot selected to run, use --help\n") return 1 # Here's where we actually run our bot if botname or instance: if botname: botname = botname.replace("/", "") if botname is not None and organization_id is not None and instance is None: sys.stderr.write( Color.RED + "Missing the bot instance ID.\n" + Color.END ) sys.stderr.write( Color.RED + "To run a bot under an organization: -r -o -i " + Color.END ) sys.stderr.write("\n\n") return 1 if organization_id is not None: location_id = None if not botengine_key: if user_key is None: if organization_id is not None: if admin_username is None or admin_password is None: print( "You want to run a bot inside an organization ID, but not an admin username+password.\nUse --admin_username and --admin_password to perform this operation." ) return -1 user_key = _login( server, admin_username, admin_password, admin=True ) else: user_key = _login(server, username, password) try: if not instance: instance = _get_instance_id_from_bundle_id( server, user_key, botname, challenge_id, location_id=location_id, ) if instance is None: sys.stderr.write( Color.RED + "You must first purchase and configure a bot in your account before you can run it." + Color.END ) sys.stderr.write("\n\n") return 1 print( Color.BOLD + "Bot Instance: {}".format(instance) + Color.END ) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") return 2 base_path = os.path.join(os.getcwd(), "." 
+ botname) _merge_redirects( os.path.join(os.getcwd(), botname), base_path, botname, server, args.core_directory, ) # Copy the required botengine and lambda files _merge_botengine(base_path) # Change the current working directory sys.path.insert(0, base_path) bot = importlib.import_module("bot") if "run" in dir(bot): if forever: # Run the bot locally, forever version_file = os.path.join(base_path, RUNTIME_FILENAME) if not os.path.exists(version_file): sys.stderr.write( Color.RED + version_file + " does not exist" + Color.END ) sys.stderr.write( Color.RED + "You must run the BotEngine a level below your bot's directory" + Color.END ) sys.stderr.write("\n\n") return 1 device_server = _get_ensemble_server_url(server) if "http" not in device_server: device_server = "https://" + device_server # Replace the server name with the one we are using during local execution mapping = [ (["sbox2.", "sbox.", "sboxall."], "sbox1."), (["app2.", "app."], "app1."), ] for from_servers, to_server in mapping: for from_server in from_servers: device_server = device_server.replace(from_server, to_server) print("Device Server: " + device_server) print("Running forever, until you press CTRL+Z to quit\n") _run_locally_forever(server, device_server, user_key, bot, instance) else: # Run the bot one time. Usually this is done only on the server. _run(bot, inputs, _bot_loggers["botengine"], local=True) else: sys.stderr.write("This bot does not contain a 'run' method\n\n") _bot_loggers["botengine"].error( "This bot does not contain a 'run' method" ) return 1 return 0 except KeyboardInterrupt: ### handle keyboard interrupt ### return 0 except SystemExit: return 0 except BotError as e: _bot_loggers["botengine"].error("BotEngine Error: " + e.msg) return 2 except Exception: # e = sys.exc_info()[0] # if DEBUG or TESTRUN: # raise(e) import traceback s = traceback.format_exc() sys.stderr.write(s + "\n\n") _bot_loggers["botengine"].error(s) return 3 def _merge_redirects( bot_directory, merge_directory, bundle_id, server=DEFAULT_BASE_SERVER_URL, core_directory=None, ): """ Merge the bot_directory with any other bot it redirects to, into the merge_directory - to produce the final branded bot. :param bot_directory: Original bot directory :param merge_directory: Destination directory to produce the final, merged bot files :param bundle_id: Bundle ID :param server: Server address to generate a bot for, which gets declared in a bundle.py file :param core_directory: For repository architectures that separate a common core repository from a private proprietary repository, this is the absolute path to the core repository directory :return: final directory """ import shutil # Remove the last commit if os.path.isdir(merge_directory): shutil.rmtree(merge_directory, ignore_errors=True) merge_list = [bot_directory] # First gather the list of redirects while True: extends_bundle = _get_extends_bundle(merge_list[-1]) if extends_bundle is not None and extends_bundle.strip() != "": found = False local_bundle_path = os.path.join(os.getcwd(), extends_bundle) if os.path.exists(local_bundle_path): found = True merge_list.append(local_bundle_path) if core_directory is not None: core_bundle_path = os.path.join(core_directory, extends_bundle) if os.path.exists(core_bundle_path): found = True merge_list.append(core_bundle_path) if not found: print( Color.RED + "Error: Cannot find extension '{}' from structure.json in directory {}. 
Core directory is {}.".format( extends_bundle, merge_list[-1], core_directory ) + Color.END ) exit(1) else: break # Merge all foundational layers of the bot in reverse order so the first one gets precedence for source in reversed(merge_list): shutil.copytree(source, merge_directory, dirs_exist_ok=True) # DISTRIBUTED STRUCTURE.JSON FILES pip_install = [] # Common Python dependencies that are required for all bots pip_install_remotely = [ "dill", "requests<2.30.0", "urllib3<2", "python-dateutil", "pytz", "colorama", "pydantic", ] microservices = [] safe_delete_microservices = [] base_directories = [os.getcwd()] if core_directory is not None: base_directories.append(core_directory) # Produce a base list of microservices and Python dependences pip_install = list( set(set(pip_install) | set(_extract_packages(merge_directory, True))) ) pip_install_remotely = prepare_dependencies( list( set( set(pip_install_remotely + pip_install) | set(_extract_packages(merge_directory, False)) ) ) ) microservices = list( set(set(microservices) | set(_extract_microservice_links(merge_directory))) ) safe_delete_microservices = list( set( set(safe_delete_microservices) | set(_extract_safe_delete_microservice_links(merge_directory)) ) ) # Recursively copy in microservices, then traverse the new directory structure # looking for additional structure.json files to see if we need to add more microservices while True: for source in microservices + safe_delete_microservices: found = False # The source typically looks like "com.ppc.Microservices/intelligence/something" # The destination should remove the initial 'com.ppc.Microservices' directory # We grab all the text after the first '/' character using: source[source.find(os.sep)+1:] destination = os.path.join( merge_directory, source[source.find(os.sep) + 1 :] ) if not os.path.exists(destination): os.makedirs(destination) def copy_bot_services(local_source, destination): """ Copy files from the local source to the destination :param local_source: Local source file :param destination: Destination file""" if os.path.isdir(local_source): # Ignore __pycache__ directories if "__pycache__" in local_source: return shutil.copytree( local_source, destination, dirs_exist_ok=True, copy_function=copy_bot_services, ) else: # Ignore .pyc files if local_source.endswith(".pyc"): return if not os.path.exists(destination): shutil.copy2(local_source, destination) else: if Path(local_source).parts[-1] == MICROSERVICES_INDEX_FILENAME: # Merge the microservices index files # Dump and load to ensure the JSON is formatted correctly index_json = json.loads( json.dumps( _extract_microservices_from_index(local_source) ) ) destination_index_json = json.loads( json.dumps( _extract_microservices_from_index(destination) ) ) # Combine the microservices index files if DEVICE_MICROSERVICES_KEY in index_json: if ( DEVICE_MICROSERVICES_KEY not in destination_index_json ): destination_index_json[ DEVICE_MICROSERVICES_KEY ] = {} for device_type in index_json[DEVICE_MICROSERVICES_KEY]: if ( device_type not in destination_index_json[ DEVICE_MICROSERVICES_KEY ] ): destination_index_json[ DEVICE_MICROSERVICES_KEY ][device_type] = [] for service in index_json[DEVICE_MICROSERVICES_KEY][ device_type ]: if ( service not in destination_index_json[ DEVICE_MICROSERVICES_KEY ][device_type] ): destination_index_json[ DEVICE_MICROSERVICES_KEY ][device_type].append(service) index_keys = [ LOCATION_MICROSERVICES_KEY, DATA_FILTERS_KEY, ORGANIZATION_MICROSERVICES_KEY, ] for key in index_keys: if key in index_json: if key 
not in destination_index_json: destination_index_json[key] = [] for service in index_json[key]: if service not in destination_index_json[key]: destination_index_json[key].append(service) with open(destination, "w") as outfile: json.dump(destination_index_json, outfile, indent=2) elif Path(local_source).parts[-1] == RUNTIME_FILENAME: # Merge the runtime files # Dump and load to ensure the JSON is formatted correctly runtime_json = json.loads( json.dumps(_extract_json_from_file(local_source)) ) destination_runtime_json = json.loads( json.dumps(_extract_json_from_file(destination)) ) # Combine the runtime files destination_runtime_json = _merge_runtime_json( destination_runtime_json, runtime_json ) with open(destination, "w") as outfile: json.dump(destination_runtime_json, outfile, indent=2) elif Path(local_source).parts[-1] == STRUCTURE_FILENAME: # Merge the structure files # Dump and load to ensure the JSON is formatted correctly structure_json = json.loads( json.dumps(_extract_json_from_file(local_source)) ) destination_structure_json = json.loads( json.dumps(_extract_json_from_file(destination)) ) # Combine the structure files if "pip_install_remotely" in structure_json: if ( "pip_install_remotely" not in destination_structure_json ): destination_structure_json[ "pip_install_remotely" ] = [] for package in structure_json["pip_install_remotely"]: if ( package not in destination_structure_json[ "pip_install_remotely" ] ): destination_structure_json[ "pip_install_remotely" ].append(package) if "microservices" in structure_json: if "microservices" not in destination_structure_json: destination_structure_json["microservices"] = [] for microservice in structure_json["microservices"]: if ( microservice not in destination_structure_json[ "microservices" ] ): destination_structure_json[ "microservices" ].append(microservice) if "safe_delete_microservices" in structure_json: if ( "safe_delete_microservices" not in destination_structure_json ): destination_structure_json[ "safe_delete_microservices" ] = [] for microservice in structure_json[ "safe_delete_microservices" ]: if ( microservice not in destination_structure_json[ "safe_delete_microservices" ] ): destination_structure_json[ "safe_delete_microservices" ].append(microservice) with open(destination, "w") as outfile: json.dump(destination_structure_json, outfile, indent=2) else: shutil.copy2(local_source, destination) if core_directory is not None: core_source = os.path.join(core_directory, source) if os.path.exists(core_source): found = True shutil.copytree( core_source, destination, dirs_exist_ok=True, copy_function=copy_bot_services, ) local_source = os.path.join(os.getcwd(), source) if os.path.exists(local_source): found = True shutil.copytree( local_source, destination, dirs_exist_ok=True, copy_function=copy_bot_services, ) if not found: print( Color.RED + "Error: Cannot find microservice package '{}'.\n".format(source) + Color.END ) exit(1) if source in safe_delete_microservices: index_file = os.path.join(destination, MICROSERVICES_INDEX_FILENAME) if os.path.exists(index_file): os.remove(index_file) # Review all microservices that were copied in and see if there are more dependencies to add new_microservices = microservices pip_install = list( set(set(pip_install) | set(_extract_packages(merge_directory, True))) ) pip_install_remotely = prepare_dependencies( list( set( set(pip_install_remotely + pip_install) | set(_extract_packages(merge_directory, False)) ) ) ) new_microservices = list( set( set(new_microservices) | 
set(_extract_microservice_links(merge_directory)) ) ) if len(new_microservices) != len(microservices): # There was a new microservice package dependency added. Run the process again and see if we find more dependencies. # It's not very efficient for execution, but this is efficient for my time that's better spent solving other problems. microservices = new_microservices else: # We've run to the end of the road for adding microservice package dependencies break with open(os.path.join(merge_directory, STRUCTURE_FILENAME), "w") as outfile: json.dump( {"pip_install_remotely": pip_install_remotely}, outfile, indent=2, sort_keys=True, ) with open(os.path.join(merge_directory, "bundle.py"), "w") as outfile: outfile.write('BUNDLE_ID = "{}"\n'.format(bundle_id)) outfile.write('CLOUD_ADDRESS = "{}"\n'.format(server)) # Include the Bot Type. See BotEngine.BOT_TYPE_... with open( os.path.join(merge_directory, MARKETING_FILENAME), "r" ) as marketing_file: marketing_text = "" for line in marketing_file: line = line.strip() if not line.startswith("#"): marketing_text += line try: marketing_data = json.loads(marketing_text) # print("Marketing Data: " + json.dumps(marketing_data)) if "type" not in marketing_data.get("app", {}): raise Exception("No type in marketing.json file") outfile.write('BOT_TYPE = "{}"\n'.format(marketing_data["app"]["type"])) except Exception: # print("Error parsing marketing.json file, using default BOT_TYPE [e: {}]".format(e)) pass # DISTRIBUTED MICROSERVICES INDEX.PY FILES index_filename = os.path.join(merge_directory, MICROSERVICES_INDEX_FILENAME) # Data filters: { "module_name_key" : ("class_name_value", execution_priority), ... } data_filters = {} # Devices: { device_type: { "module_name_key": ("class_name_value", execution_priority) }, ... } device_microservices = {} # Locations: { "module_name_key" : ("class_name_value", execution_priority), ... } location_microservices = {} # Organizations: { "module_name_key" : ("class_name_value", execution_priority), ... 
} organization_microservices = {} for current_dir, dirs, files in os.walk(merge_directory): if os.path.exists(os.path.join(current_dir, MICROSERVICES_INDEX_FILENAME)): index_json = _extract_microservices_from_index( os.path.join(current_dir, MICROSERVICES_INDEX_FILENAME) ) if DEVICE_MICROSERVICES_KEY in index_json: for device_type in index_json[DEVICE_MICROSERVICES_KEY]: if device_type not in device_microservices: device_microservices[device_type] = {} for service in index_json[DEVICE_MICROSERVICES_KEY][device_type]: device_microservices[device_type][service["module"]] = ( service["class"], service.get("execution_priority", 0), ) if LOCATION_MICROSERVICES_KEY in index_json: for service in index_json[LOCATION_MICROSERVICES_KEY]: location_microservices[service["module"]] = ( service["class"], service.get("execution_priority", 0), ) if DATA_FILTERS_KEY in index_json: for service in index_json[DATA_FILTERS_KEY]: data_filters[service["module"]] = ( service["class"], service.get("execution_priority", 0), ) if ORGANIZATION_MICROSERVICES_KEY in index_json: for service in index_json[ORGANIZATION_MICROSERVICES_KEY]: organization_microservices[service["module"]] = ( service["class"], service.get("execution_priority", 0), ) merged_index = { DEVICE_MICROSERVICES_KEY: {}, DATA_FILTERS_KEY: [], LOCATION_MICROSERVICES_KEY: [], ORGANIZATION_MICROSERVICES_KEY: [], } for device_type in device_microservices: merged_index[DEVICE_MICROSERVICES_KEY][device_type] = [] for module_name in device_microservices[device_type]: merged_index[DEVICE_MICROSERVICES_KEY][device_type].append( { "module": module_name, "class": device_microservices[device_type][module_name][0], "execution_priority": device_microservices[device_type][ module_name ][1], } ) for module_name in location_microservices: merged_index[LOCATION_MICROSERVICES_KEY].append( { "module": module_name, "class": location_microservices[module_name][0], "execution_priority": location_microservices[module_name][1], } ) for module_name in data_filters: merged_index[DATA_FILTERS_KEY].append( { "module": module_name, "class": data_filters[module_name][0], "execution_priority": data_filters[module_name][1], } ) for module_name in organization_microservices: merged_index[ORGANIZATION_MICROSERVICES_KEY].append( { "module": module_name, "class": organization_microservices[module_name][0], "execution_priority": organization_microservices[module_name][1], } ) with open(index_filename, "w") as outfile: outfile.write( "MICROSERVICES = " + json.dumps(merged_index, indent=2, sort_keys=True) ) # To save memory and just get a fingerprint of each microservice package, we take the end name of the microservice package truncated_microservices = [] for microservice in microservices: if microservice.endswith(os.sep): microservice = microservice[:-1] truncated_microservices.append(microservice.split(os.sep)[-1]) # DISTRIBUTED RUNTIME.JSON FILES runtime_json = { "version": { # Default timeout for bots in seconds (the server's default is 1.0 second) "timeout": DEFAULT_RUNTIME_TIMEOUT_S, "microservices": truncated_microservices, } } for current_dir, dirs, files in os.walk(merge_directory): if os.path.exists(os.path.join(current_dir, RUNTIME_FILENAME)): # print("Evaluating: {}".format(os.path.join(current_dir, RUNTIME_FILENAME))) runtime_json = _merge_runtime_json( runtime_json, _extract_json_from_file(os.path.join(current_dir, RUNTIME_FILENAME)), ) runtime_filename = os.path.join(merge_directory, RUNTIME_FILENAME) with open(runtime_filename, "w") as outfile: json.dump(runtime_json, 
outfile, indent=2, sort_keys=True) # Add extra bot information about this particular bot info_filename = os.path.join(merge_directory, INFO_FILENAME) if bot_directory.endswith(os.sep): bot_directory = bot_directory[:-1] info = 'BUNDLE = "{}"'.format(bot_directory.split(os.sep)[-1]) with open(info_filename, "w") as outfile: outfile.write(info) def _merge_botengine(destination, aws_lambda=True): """ Find the best 'botengine' file and move it into the destination directory, with the best extension :param destination: Destination directory for the botengine :param aws_lambda: True if this will execute on AWS Lambda (default) which requires its own considerations """ import shutil botengine_path = __file__ botengine_bytecode_path = os.path.join( os.path.dirname(botengine_path), "botengine_bytecode" ) lambda_path = os.path.join(os.path.dirname(botengine_path), "lambda.py") bash = False if os.path.exists(botengine_path): with open(botengine_path, "r") as f: bash = "#!/bin/bash" in f.readline() if not bash: if os.path.exists(botengine_bytecode_path): # Recompile os.remove(botengine_bytecode_path) import py_compile py_compile.compile(botengine_path, botengine_bytecode_path) if aws_lambda: if os.path.exists(botengine_path) and not bash: # Let the server compile botengine shutil.copyfile(botengine_path, os.path.join(destination, "botengine.py")) elif os.path.exists(botengine_bytecode_path): # Upload a pre-compiled version of the botengine shutil.copyfile( botengine_bytecode_path, os.path.join(destination, "botengine.pyc") ) if os.path.exists(lambda_path): shutil.copyfile(lambda_path, os.path.join(destination, "lambda.py")) else: if os.path.exists(botengine_path): shutil.copyfile(botengine_path, os.path.join(destination, "botengine")) if os.path.exists(botengine_bytecode_path): shutil.copyfile( botengine_bytecode_path, os.path.join(destination, "botengine_bytecode") ) def _merge_runtime_json(primary, secondary): """ Merge 2 runtime JSON dictionary structures. :param primary: Takes precedence whenever there's a conflict :param secondary: Secondary JSON runtime structure that can get overwritten when there's a conflict. 
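Illustrative example (hypothetical values): merging
    primary   = {"version": {"memory": 256, "trigger": 2}}
    secondary = {"version": {"memory": 128, "trigger": 8, "goalRule": True}}
yields {"version": {"memory": 256, "trigger": 10, "goalRule": True}}: trigger bitmasks are OR'ed together, the largest memory requirement wins, and single-select fields such as 'goalRule' are taken from the primary when present, otherwise from the secondary.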
:return: a final runtime dictionary structure """ primary = primary["version"] secondary = secondary["version"] return_json = {} # Select one version number if "version" in primary: return_json["version"] = primary["version"] elif "version" in secondary: return_json["version"] = secondary["version"] # Select one 'whatsnew' if "whatsnew" in primary: return_json["whatsnew"] = primary["whatsnew"] elif "whatsnew" in secondary: return_json["whatsnew"] = secondary["whatsnew"] # Select one runtime type if "runtime" in primary: return_json["runtime"] = primary["runtime"] elif "runtime" in secondary: return_json["runtime"] = secondary["runtime"] # Select one maxPurchaseOccurrence if "maxPurchaseOccurrence" in primary: return_json["maxPurchaseOccurrence"] = primary["maxPurchaseOccurrence"] elif "maxPurchaseOccurrence" in secondary: return_json["maxPurchaseOccurrence"] = secondary["maxPurchaseOccurrence"] # Combine all triggers trigger = 0 if "trigger" in primary: trigger |= primary["trigger"] if "trigger" in secondary: trigger |= secondary["trigger"] if trigger > 0: return_json["trigger"] = trigger # Select the largest amount of memory required memory = 0 if "memory" in primary: if primary["memory"] > memory: memory = primary["memory"] if "memory" in secondary: if secondary["memory"] > memory: memory = secondary["memory"] if memory > 0: return_json["memory"] = memory # Select the largest execution timeout required timeout = 0 if "timeout" in primary: if primary["timeout"] > timeout: timeout = primary["timeout"] if "timeout" in secondary: if secondary["timeout"] > timeout: timeout = secondary["timeout"] if timeout > 0: return_json["timeout"] = timeout # Combine all schedules and get rid of 'schedule' schedules = {} if "schedule" in primary: schedules["DEFAULT"] = primary["schedule"] elif "schedule" in secondary: schedules["DEFAULT"] = secondary["schedule"] if "schedules" in secondary: for schedule_id in secondary["schedules"]: schedules[schedule_id] = secondary["schedules"][schedule_id] if "schedules" in primary: for schedule_id in primary["schedules"]: schedules[schedule_id] = primary["schedules"][schedule_id] return_json["schedules"] = schedules # Combine all data stream messages without duplicates datastreams = set([]) if "dataStreams" in primary: for d in primary["dataStreams"]: datastreams.add(d["address"]) if "dataStreams" in secondary: for d in secondary["dataStreams"]: datastreams.add(d["address"]) if len(datastreams) > 0: return_json["dataStreams"] = [] for d in datastreams: return_json["dataStreams"].append({"address": d}) return_json["dataStreams"].sort(key=lambda x: x["address"].lower()) # Combine all trigger parameters without duplicates trigger_parameters = set([]) if "triggerParameters" in primary: trigger_parameters.update(primary["triggerParameters"]) if "triggerParameters" in secondary: trigger_parameters.update(secondary["triggerParameters"]) if len(trigger_parameters) > 0: return_json["triggerParameters"] = list(trigger_parameters) return_json["triggerParameters"].sort(key=lambda x: x.lower()) microservices = set([]) if "microservices" in primary: microservices.update(primary["microservices"]) if "microservices" in secondary: microservices.update(secondary["microservices"]) if len(microservices) > 0: return_json["microservices"] = list(microservices) return_json["microservices"].sort(key=lambda x: x.lower()) # Putting this under triggerParameters because they both require the same server version # Select the appropriate goalRule flag to prevent the server from making default rules 
if "goalRule" in primary: return_json["goalRule"] = primary["goalRule"] elif "goalRule" in secondary: return_json["goalRule"] = secondary["goalRule"] # Combine all communications if "communications" in primary or "communications" in secondary: communications = [] if "communications" in primary: communications = primary["communications"] if "communications" in secondary: for secondary_comms_block in secondary["communications"]: found = False for primary_comms_block in communications: if ( primary_comms_block["category"] == secondary_comms_block["category"] ): # Same category found = True for item in primary_comms_block: if item in secondary_comms_block: # logical-OR together values from each dictionary so False | True => True and category numbers don't change primary_comms_block[item] |= secondary_comms_block[item] for item in secondary_comms_block: if item not in primary_comms_block: # Make sure we don't drop any new items from the secondary communications block primary_comms_block[item] = secondary_comms_block[item] break if not found: communications.append(secondary_comms_block) return_json["communications"] = communications return_json["communications"].sort(key=lambda x: x["category"]) # Combine all access privileges if "access" in primary or "access" in secondary: access = [] if "access" in primary: access = primary["access"] if "access" in secondary: for secondary_access_block in secondary["access"]: found = False for primary_access_block in access: if ( primary_access_block["category"] == secondary_access_block["category"] ): found = True for item in ["trigger", "read", "control"]: if item in primary_access_block: if item in secondary_access_block: primary_access_block[item] |= ( secondary_access_block[item] ) elif item in secondary_access_block: primary_access_block[item] = secondary_access_block[ item ] break if not found: access.append(secondary_access_block) return_json["access"] = access return_json["access"].sort(key=lambda x: x["category"]) # Combine all device types if "deviceTypes" in primary or "deviceTypes" in secondary: device_types = [] if "deviceTypes" in primary: device_types = primary["deviceTypes"] if "deviceTypes" in secondary: for secondary_dt_block in secondary["deviceTypes"]: found = False control = False read = False trigger = False if "read" in secondary_dt_block: read = secondary_dt_block["read"] if "control" in secondary_dt_block: control = secondary_dt_block["control"] if "trigger" in secondary_dt_block: trigger = secondary_dt_block["trigger"] for primary_dt_block in device_types: if primary_dt_block["id"] == secondary_dt_block["id"]: found = True if "read" in primary_dt_block: read |= primary_dt_block["read"] if "control" in primary_dt_block: control |= primary_dt_block["control"] if "trigger" in primary_dt_block: trigger |= primary_dt_block["trigger"] primary_dt_block["read"] = read primary_dt_block["control"] = control primary_dt_block["trigger"] = trigger break if not found: device_types.append(secondary_dt_block) return_json["deviceTypes"] = device_types return_json["deviceTypes"].sort(key=lambda x: x["id"]) return_json = {k:v for k, v in return_json.items() if v} return {"version": return_json} def _extract_microservices_from_index(file_location): """ Extract the DEVICE_MICROSERVICES and LOCATION_MICROSERVIECS from the index.py file found at the file location :param file_location: Absolute location of the index.py file :return: JSON structure """ with open(file_location, "r") as f: index_text = "" for line in f: if not line.strip().startswith("#"): 
index_text += line.strip() try: index_text = index_text.replace(" ", "") index_text = index_text.replace("MICROSERVICES=", "") index_json = eval(index_text) except SyntaxError as e: print(Color.RED + "Problem with: " + str(file_location) + Color.END) raise e return index_json return {} def _extract_json_from_file(file_location): """ Extract JSON content from a file :param file_location: :return: """ with open(file_location, "r") as f: content = "" for line in f: if not line.strip().startswith("#"): content += line.strip() return json.loads(content) return {} def _extract_packages(directory, local=True): """ Extract a list of pip install packages from the structure.json file in the given directory :param directory: Directory that might have a structure.json file in it :param local: True to extract the packages names that should be installed locally :return: A list """ pip_install = [] if os.path.exists(directory): for current_dir, dirs, files in os.walk(directory): if os.path.exists(os.path.join(current_dir, STRUCTURE_FILENAME)): structure_text = "" with open(os.path.join(current_dir, STRUCTURE_FILENAME), "r") as f: for line in f: if not line.strip().startswith("#"): structure_text += line structure_json = json.loads(structure_text) if local: if "pip_install" in structure_json: for item in structure_json["pip_install"]: if item not in pip_install: pip_install.append(item) else: if "pip_install_remotely" in structure_json: for item in structure_json["pip_install_remotely"]: if item not in pip_install: pip_install.append(item) return pip_install def prepare_dependencies(deps): """ Allow Python packages that have specified a version number to take precedence over similarly named packages that do not specify a version. Contributed by Tyler Garner :param deps: Dependencies """ split = map(lambda x: x.split("=="), deps) dependencies = dict() for package, *version in list(split): # dependency is already specified and this entry does not have a version. if dependencies.get(package) and not version: continue # dependency is already specified and this entry has a different version. if dependencies.get(package) and version: print( "Error during dependency resolution. Two versions of the same dependency are being requested. 
Dependency: {}, version requested {}, version already requested {}.".format( package, version, dependencies.get(package) ) ) continue dependencies[package] = version[0] if version else "" return [ ("==".join([dep, ver]) if dep and ver else dep) for dep, ver in dependencies.items() ] def _extract_microservice_links(directory): """ Extract a list of microservice directories from the structure.json file in the given directory :param directory: :return: """ microservices = [] if os.path.exists(directory): for current_dir, dirs, files in os.walk(directory): if os.path.exists(os.path.join(current_dir, STRUCTURE_FILENAME)): structure_text = "" with open(os.path.join(current_dir, STRUCTURE_FILENAME), "r") as f: for line in f: if not line.strip().startswith("#"): structure_text += line structure_json = json.loads(structure_text) if "microservices" in structure_json: for item in structure_json["microservices"]: item = item.replace("/", os.sep).replace("\\", os.sep) if item not in microservices: microservices.append(item) return microservices def _extract_safe_delete_microservice_links(directory): """ Extract a list of microservice directories to safely delete, from the structure.json file in the given directory :param directory: :return: """ microservices = [] for current_dir, dirs, files in os.walk(directory): if os.path.exists(os.path.join(current_dir, STRUCTURE_FILENAME)): structure_text = "" with open(os.path.join(current_dir, STRUCTURE_FILENAME), "r") as f: for line in f: if not line.strip().startswith("#"): structure_text += line structure_json = json.loads(structure_text) if "safe_delete_microservices" in structure_json: for item in structure_json["safe_delete_microservices"]: item = item.replace("/", os.sep).replace("\\", os.sep) if item not in microservices: microservices.append(item) return microservices def _get_extends_bundle(search_directory): """ If the structure.json file in the given directory declares that we should extend some other bot, this method will return the extension value representing that other bot. :param search_directory: Search this directory for a structure.json file that may declare we extend a different directory :return: Extension directory name if it exists, None if it doesn't exist """ if os.path.exists(os.path.join(search_directory, STRUCTURE_FILENAME)): structure_text = "" with open(os.path.join(search_directory, STRUCTURE_FILENAME), "r") as f: for line in f: if not line.strip().startswith("#"): structure_text += line try: structure_json = json.loads(structure_text) except ValueError as e: print( Color.BOLD + Color.RED + "Error loading JSON content from {}".format( os.path.join(search_directory, STRUCTURE_FILENAME) ) + Color.END ) print("Error: {}".format(e) + "\n") exit(1) if "extends" in structure_json: return structure_json["extends"] return None def _get_instance_id_from_bundle_id( server, user_key, bundle_id, challenge_id=None, location_id=None ): """Get the instance ID from the bundle ID :param server: Server to use. :param user_key: User's /cloud API key. :param bundle_id: Bundle ID to find an bot instance for. :param challenge_id: Challange id which was used for creating bot instance id. 
:param location_id: Location ID """ bots = _botstore_mybots(server, user_key, location_id=location_id) if bots is None: print("No bots") return None potential_apps = [] for bot in bots: bundle = bot.get("bundle") if bundle is None: bundle = bot.get("bot", {}).get("bundle") if bundle == bundle_id and bot["status"] >= 0 and bot["status"] < 5: # The bundle ID matches and the bot is active try: nickname = bot["nickname"] except KeyError: nickname = bot["name"] potential_apps.append((nickname, bot["appInstanceId"])) if len(potential_apps) == 0: print("No potential bots") return None if len(potential_apps) == 1: # You have no choice (n, i) = potential_apps[0] instance = i elif challenge_id: if challenge_id != "0": instance = None for bot in bots: try: if int(challenge_id) == bot["access"][0]["challengeId"]: instance = bot["appInstanceId"] except Exception as e: print(f"Error: {e}") pass else: instance = 0 for bot in bots: try: if ( bot["access"][0]["challengeId"] and bot["appInstanceId"] > instance ): instance = bot["appInstanceId"] except Exception as e: print(f"Error: {e}") pass print(Color.BOLD + "Bot instance ID: " + str(instance) + Color.END) else: # Pick a bot print( Color.BOLD + "Here are your available bot instances that match this bundle ID:" + Color.END ) selection = None while selection is None: for n, i in potential_apps: print("\t" + Color.BOLD + str(i) + Color.END + " : " + n, end="") for bot in bots: if bot["appInstanceId"] == i: try: challengeId = bot["access"][0]["challengeId"] print( " (" + Color.BOLD + "challengeId = " + str(challengeId) + Color.END + ")", end="", ) except Exception as e: print(f"Error: {e}") pass print() selection = input("Which bot instance should we execute: ") ok = False for n, i in potential_apps: if str(i) == selection: ok = True break if not ok: selection = None else: instance = selection return instance def _get_local_files(local_dir, walk=False): """ Retrieve local files list result_list == a list of dictionaries with path and mtime keys. ex: {'path':,'mtime':} ignore_dirs == a list of directories to ignore, should not include the base_dir. ignore_files == a list of files to ignore. ignore_file_ext == a list of extensions to ignore.
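Illustrative result (hypothetical path):
    [{'path': '/bots/com.example.Bot/bot.py', 'mtime': 1718000000.0}]
Version control and cache directories (CVS, .svn, .git, __pycache__) and .pyc files are always skipped.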
""" result_list = [] ignore_dirs = ["CVS", ".svn", ".git", "__pycache__"] ignore_files = [ ".project", ".pydevproject", MARKETING_FILENAME, RUNTIME_FILENAME, "icon.png", ] ignore_file_ext = [".pyc"] base_dir = os.path.abspath(local_dir) for current_dir, dirs, files in os.walk(base_dir): for this_dir in ignore_dirs: if this_dir in dirs: dirs.remove(this_dir) sub_dir = current_dir.replace(base_dir, "") if not walk and sub_dir: break for this_file in files: if ( this_file not in ignore_files and os.path.splitext(this_file)[-1].lower() not in ignore_file_ext ): file_path = os.path.join(current_dir, this_file) file_monitor_dict = { "path": file_path, "mtime": os.path.getmtime(file_path), } result_list.append(file_monitor_dict) return result_list def _get_cloud_version(server): """ Get Cloud version :return: (major,minor) """ version = _get_cloud_server_version(server) if version: semantic_version = version.split(".") major = int(semantic_version[0]) minor = 0 if len(semantic_version) > 1: minor = int(semantic_version[1]) return (major, minor) return (0, 0) def _get_cloud_api_server(server): """ Get Cloud API server :return: Cloud API server string """ j = _get_connection_settings(server) host = server if "clouds" in j and len(j["clouds"]) > 0 and "servers" in j["clouds"][0]: for server in j["clouds"][0]["servers"]: if "type" in server and "host" in server and server["type"] == "appapi": host = server["host"] if "http" not in host: host = "https://" + host return host def _get_connection_settings(server): """ Get Connection settings https://iotapps.docs.apiary.io/#reference/cloud-connectivity/cloud-settings/get-connection-settings :return: Cloud settings json """ global _https_proxy import requests http_headers = {"Content-Type": "application/json"} r = requests.get( server + "/cloud/json/settings", headers=http_headers, proxies=_https_proxy ) j = json.loads(r.text) _check_for_errors(j) return j def _get_cloud_server_version(server): """ Get Cloud Version https://iotapps.docs.apiary.io/#reference/cloud-connectivity/version :return: Version string """ global _https_proxy import requests http_headers = {"Content-Type": "text/html"} r = requests.get( server + "/espapi/version?version=true", headers=http_headers, proxies=_https_proxy, ) return r.text def _is_server_version_newer_than(server, major, minor): """ Compare the server version. 
:param major: Major value :param minor: Minor value :return: True if the server version is greater than or equal to the major and minor values """ _major, _minor = _get_cloud_version(server) return int(_major) > int(major) or ( int(_major) == int(major) and int(_minor) >= int(minor) ) def _get_ensemble_server_url(server, device_id=None): """Get Ensemble server URL""" global _https_proxy import requests http_headers = {"Content-Type": "application/json"} params = {"type": "deviceio", "ssl": True} if not device_id: # to be removed params["deviceId"] = "nodeviceid" else: params["deviceId"] = device_id r = requests.get( server + "/cloud/json/settingsServer", params=params, headers=http_headers, proxies=_https_proxy, ) return r.text def _login(server, username, password, admin=False): """ Login and obtain an API key :param server: Server address :param username: Username :param password: Password :return: API Key """ global _https_proxy import pickle if not username: username = input("Email address: ") if not password: import getpass password = getpass.getpass() try: import requests type = "user" if admin: type = "admin" fixed_server = ( server.replace("http://", "").replace("https://", "").split(".")[0] ) filename = "{}.{}.{}".format(username, fixed_server, type) # Load the stored key, refreshing it if it is about to expire. e.g.:username.app.user={ "key": key, "key_expires_ms": key_expire_ms } if os.path.isfile(filename): try: with open(filename, "rb") as f: key = pickle.load(f) if isinstance(key, dict): key_expire_ms = key["key_expires_ms"] if key_expire_ms > time.time() * 1000 * 10: # The key is still valid return key["key"] key = key["key"] params = {"keyType": 0} if admin: params["keyType"] = 11 params["expiry"] = 2 http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/json/loginByKey", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) key_json = {"key": key, "key_expires_ms": j["keyExpireMs"]} with open(filename, "wb") as f: pickle.dump(key_json, f) return key except Exception as e: print("Could not log in with existing key: {}".format(e)) pass params = {"username": username} if admin: params["keyType"] = 11 pref_delivery_type = input('To use SMS to get your passcode type "y"?: ') pref_delivery_type = ( 3 if len(pref_delivery_type) == 0 or pref_delivery_type[0].lower() == "y" else 2 ) params["prefDeliveryType"] = pref_delivery_type http_headers = {"PASSWORD": password, "Content-Type": "application/json"} r = requests.get( server + "/cloud/json/login", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) if j["resultCode"] == 17: passcode = input("Type in the passcode you received on your phone: ") passcode = passcode.upper() params["expiry"] = 2 http_headers["passcode"] = passcode del http_headers["PASSWORD"] r = requests.get( server + "/cloud/json/login", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) key = j["key"] key_json = {"key": key, "key_expires_ms": j["keyExpireMs"]} with open(filename, "wb") as f: pickle.dump(key_json, f) return key except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\nCreate an account on " + server + " and use it to sign in") sys.stderr.write("\n\n") raise e # except Exception: # sys.stderr.write(Color.RED + "Error trying to contact the server.\n\n" + Color.END) # exit(1) def _get_location_info(cloud_url, user_key, location_id): """ Get 
information about a specific location https://iotapps.docs.apiary.io/#reference/user-accounts/manage-a-user/get-user-information :param cloud_url: Cloud URL :param admin_key: Administrative API key :param location_id: Location ID :return: JSON content information about the location, None if we can't get it """ user_info = _get_user_info(cloud_url, user_key) for location in user_info["locations"]: if location["id"] == location_id: return location return None def _get_user_info(server, user_key, user_id=None): """ Get the user info https://iotapps.docs.apiary.io/#reference/user-accounts/manage-a-user/get-user-information :param server: Server address :param user_key: User API key :param user_id: User ID for administrator access """ import requests global _https_proxy try: # get user info http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} params = {} if user_id is not None: params["userId"] = user_id r = requests.get( server + "/cloud/json/user", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j except BotError as e: sys.stderr.write("BotEngine Get User Info Error: " + e.msg) sys.stderr.write("\n\n") raise e def _get_organization_locations(server, user_key, organization_id): """ Get a list of locations from an organization The actual API has a lot more search and filtering options than we've brought out here. https://iotadmins.docs.apiary.io/#reference/users-and-locations/locations-in-an-organization/get-locations :param server: :param user_key: :param organization_id: :return: """ import requests global _https_proxy try: params = {"organizationId": organization_id} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.get( server + "/admin/json/locations", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j.get("locations", []) except BotError as e: sys.stderr.write("BotEngine Error: " + e.msg) sys.stderr.write("\n\n") raise e def _get_botengine_key(server, user_key, bot_key_type, bot_instance_id): """Get a BotEngine API key by appKey :param server: Server to use :param user_key: User's /cloud API key :param bot_key_type: 0 = normal bot; 1 = developer bot :param bot_instance_id: Application instance ID to execute """ import requests global _https_proxy http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} params = {"appType": bot_key_type, "appInstanceId": bot_instance_id} r = requests.get( server + "/analytic/appkey", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["key"] def _compile(): """Compile all .py files to .pyc""" pass def _create_or_update_app(server, key, bundle, data, team_name=None): """Create or update the bot""" import requests global _https_proxy j = json.dumps(data) http_headers = {"API_KEY": key, "Content-Type": "application/json"} url = server + "/cloud/developer/apps?bundle=" + bundle if team_name is not None: url += "&developerTeam=" + team_name r = requests.put(url, headers=http_headers, data=j, proxies=_https_proxy) try: j = json.loads(r.text) except json.decoder.JSONDecodeError as e: print("Cannot decode JSON response from server: {}".format(r.text)) raise e if j["resultCode"] == 9: if team_name is None: default_team = bundle.split(".")[1] team_name = input( "What is the name of the team who will collaborate on this bot? 
(Hit enter to accept '{}'): ".format( default_team ) ) if team_name == "": team_name = default_team _create_team(server, key, team_name) _create_or_update_app(server, key, bundle, data, team_name) else: _check_for_errors(j) return j def _create_team(server, key, name, description=None): """Create a team""" import requests global _https_proxy body = {"name": name} if description is not None: body["description"] = description http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.post( server + "/cloud/developer/teams", data=json.dumps(body), headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) if j["resultCode"] != 26: _check_for_errors(j) return j def _add_team_member(server, key, name, user_id): """Add a team member to an existing developer team""" import requests global _https_proxy params = {"userId": user_id} http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.post( server + "/cloud/developer/teams/" + name, params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _delete_team_member(server, key, name, user_id): """Delete a team member from a developer team""" import requests global _https_proxy params = {"userId": user_id} http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.delete( server + "/cloud/developer/teams/" + name, params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _get_team_members(server, key, team_name, bundle): """Get team members""" import requests global _https_proxy http_headers = {"API_KEY": key, "Content-Type": "application/json"} params = {} if team_name is not None and team_name != "": params["teamName"] = team_name if bundle is not None and bundle != "": params["bundle"] = bundle r = requests.get( server + "/cloud/developer/teams", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _get_apps(server, key, bundle=None): """ Get created bots :param server: Server address :param key: API key :param bundle: Bundle ID :return: List of bots """ _bot_loggers["botengine"].debug(">_get_apps() bundle={}".format(bundle)) import requests global _https_proxy http_headers = {"API_KEY": key, "Content-Type": "application/json"} params = {} if bundle is not None: params["bundle"] = bundle r = requests.get( server + "/cloud/developer/apps", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_get_apps() response={}".format(j)) return j def _upload_icon(server, key, bundle, filePath): """Upload the icon as a 1024x1024 px PNG""" import requests global _https_proxy with open(filePath, "rb") as payload: http_headers = {"API_KEY": key, "Content-Type": "image/png"} r = requests.put( server + "/cloud/developer/objects/icon?bundle=" + bundle, headers=http_headers, data=payload, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _check_bot_processing(server, key, request_id): """ Check to see if the bot got processed correctly :param server: :param key: :param request_id: :return: response from the server """ import requests global _https_proxy http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/upload/" + str(request_id), headers=http_headers, proxies=_https_proxy, ) try: j = json.loads(r.text) except json.decoder.JSONDecodeError as
e: print("Cannot decode JSON response from the server: {}".format(r.text)) raise e _check_for_errors(j) return j def _get_versions(server, key, bundle, params=None): """Get versions of the specified bot""" import requests global _https_proxy if params is None: params = {} params["bundle"] = bundle http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/versions", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _update_latest_version(server, key, bundle, data): """Update latest version for the specified bot""" import requests global _https_proxy j = json.dumps(data) http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.put( server + "/cloud/developer/versions?bundle=" + bundle, headers=http_headers, data=j, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _update_version_status(server, key, bundle, status, ignore_errors=False): """ Update the version status of the specified bot :param server: :param key: :param bundle: :param status: :param ignore_errors: :return: """ import requests global _https_proxy params = {"bundle": bundle, "status": status} http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.put( server + "/cloud/developer/versionStatus", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) if not ignore_errors: _check_for_errors(j) return j def _upload_bot( server, key, bundle, bot_filename, tar=True, runtime=DEFAULT_RUNTIME_PYTHON, memory=DEFAULT_MEMORY, timeout=DEFAULT_RUNTIME_TIMEOUT_S, ): """ Upload the bot :param server: Server address :param key: API key :param bundle: Bundle ID :param bot_filename: Bot filename :param tar: True for a .tar file; False for a .zip file :param runtime: Runtime environment. 1=Python 2.7; 2=Python 3.7 :param memory: Environment memory limit. :param timeout: Environment timeout. :return: JSON result code """ import requests global _https_proxy if tar: # TAR bot_filename += ".tar" content_type = "application/x-tar" params = { "source": True, "async": True, "bundle": bundle, "runtime": runtime, "memory": memory, "timeout": timeout, } else: # ZIP bot_filename += ".zip" content_type = "application/zip" params = {} with open(bot_filename, "rb") as payload: http_headers = {"API_KEY": key, "Content-Type": content_type} r = requests.post( server + "/cloud/developer/upload", headers=http_headers, params=params, data=payload, proxies=_https_proxy, ) j = json.loads(r.text) # print("RESPONSE: {}".format(json.dumps(j, indent=2, sort_keys=True))) _check_for_errors(j) return j def _update_bot_runtime_parameters( server, key, bundle, development=True, memory=None, timeout=None ): """ Update memory size and timeout of bot in AWS Lambda. Available for billing bots and for regular bot versions with statuses 1,2,3,4. :param server: Server address :param key: API key :param bundle: Bundle ID :param development: Which version to update, development version or publicly available version :param memory: Environment memory limit. :param timeout: Environment timeout. 
:return: JSON result code """ import requests global _https_proxy if memory is None and timeout is None: raise BotError( "Cannot update bot runtime parameters: Missing memory or timeout", -1 ) params = {"development": development} if memory is not None: params["memory"] = memory if timeout is not None: params["timeout"] = timeout http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.put( server + "/cloud/developer/botParams/{}".format(bundle), headers=http_headers, params=params, proxies=_https_proxy, ) j = json.loads(r.text) # print("RESPONSE: {}".format(json.dumps(j, indent=2, sort_keys=True))) _check_for_errors(j) return j def _get_app_statistics(server, key, bundle): """ Get the statistics of the specified bot :param server: Server address :param key: API key :param bundle: Bot bundle ID :return: JSON status """ import requests global _https_proxy params = {} params["bundle"] = bundle http_headers = {"API_KEY": key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/stats", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _get_bot_beta_testers(server, user_key, bundle_id): """ Get the list of bot beta testers for the given bot bundle :param server: Server :param user_key: Developer's user API key :param bundle_id: Bot bundle ID :return: JSON """ import requests global _https_proxy params = {} params["bundle"] = bundle_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/tester", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _add_bot_beta_testers(server, user_key, bundle_id, user_id): """ Add a bot beta tester to the given bot bundle :param server: Server :param user_key: Developer's user API key :param bundle_id: Bot bundle ID :param user_id: User ID to add to the bundle :return: JSON """ import requests global _https_proxy params = {} params["bundle"] = bundle_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.put( server + "/cloud/developer/tester/" + str(user_id), params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _delete_bot_beta_testers(server, user_key, bundle_id, user_id): """ Delete a bot beta tester from the given bot bundle :param server: Server :param user_key: Developer's user API key :param bundle_id: Bot bundle ID :param user_id: User ID to add to the bundle :return: JSON """ import requests global _https_proxy params = {} params["bundle"] = bundle_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.delete( server + "/cloud/developer/tester/" + str(user_id), params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _beta_purchase_bot(server, user_key, bundle_id, user_id=None): """ As an approved beta tester, purchase the bot into your account for beta testing :param server: :param user_key: :param bundle_id: :return: """ import requests global _https_proxy params = {} params["bundle"] = bundle_id if user_id is not None: params["userId"] = user_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.post( server + "/cloud/developer/tester", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def 
_get_message_topics(server, key=None, analytic_key=None, app_id=None, lang=None): """ Get a list of message topics :param server: The server URL :param key: The user's API key :return: A list of message topics """ _bot_loggers["botengine"].info(">_get_message_topics()") import requests global _https_proxy params = {} params["appId"] = app_id params["lang"] = lang _bot_loggers["botengine"].info("|_get_message_topics() params={}".format(params)) http_headers = {"Content-Type": "application/json"} http_headers["API_KEY"] = key http_headers["ANALYTIC_API_KEY"] = analytic_key r = requests.get( server + "/cloud/json/messageTopics", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_get_message_topics() response={}".format(j)) return j def _update_message_topics(server, key, bundle, topics): """ Create message topics if they don't exist and update the list of topics for the specified bot. :param server: Server address :param key: API key :param bundle: Bundle ID :param topics: List of topics :return: """ _bot_loggers["botengine"].info(">_update_message_topics()") import requests global _https_proxy params = {"bundle": bundle} http_headers = {"API_KEY": key, "Content-Type": "application/json"} _bot_loggers["botengine"].info( "|_update_message_topics() params={} data={}".format(params, topics) ) j = json.dumps(topics) r = requests.put( server + "/cloud/developer/messageTopics", params=params, headers=http_headers, data=j, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_update_message_topics() response={}".format(j)) return j def _create_messages( server, key=None, analytic_key=None, location_id=None, messages_json={} ): """ Create new messages at a location. :param server: The server URL :param key: The user key :param analytic_key: The analytic key :param location_id: The location ID :param messages_json: The messages json content :return: """ _bot_loggers["botengine"].info(">_create_messages()") import requests global _https_proxy params = {} params["locationId"] = location_id http_headers = {"Content-Type": "application/json"} http_headers["API_KEY"] = key http_headers["ANALYTIC_API_KEY"] = analytic_key j = json.dumps(messages_json) _bot_loggers["botengine"].info( "|_create_messages() params={} data={}".format(params, messages_json) ) r = requests.post( server + "/cloud/json/messages", params=params, headers=http_headers, data=j, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_create_messages() response={}".format(j)) return j def _update_messages(server, key, messages_json): """ AI bots can update message statuses and schedule delivery time. 
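Illustrative call (hypothetical payload): _update_messages(server, analytic_key, messages_json) performs a PUT of the given messages JSON content to /cloud/json/messages as-is, authenticated with the ANALYTIC_API_KEY header.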
:param server: The server URL :param key: The user key :param messages_json: The messages json content :return: """ _bot_loggers["botengine"].info(">_update_messages()") import requests global _https_proxy http_headers = {"ANALYTIC_API_KEY": key, "Content-Type": "application/json"} j = json.dumps(messages_json) _bot_loggers["botengine"].info("|_update_messages() data={}".format(messages_json)) r = requests.put( server + "/cloud/json/messages", headers=http_headers, data=j, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_update_messages() response={}".format(j)) return j def _get_messages( server, key, location_id, start_date_ms, end_date_ms, instance, topic_id, read_status, ): """ Get messages from a location :param server: The server URL :param key: The user key :param location_id: The location ID :param start_date_ms: The start date in milliseconds :param end_date_ms: The end date in milliseconds :param instance: The bot instance ID :param topic_id: The topic ID :param read_status: The read status :return: Messages JSON """ _bot_loggers["botengine"].info(">_get_messages()") import requests global _https_proxy params = { "locationId": location_id, "startDate": start_date_ms, } params["endDate"] = end_date_ms params["instance"] = instance params["topic_id"] = topic_id params["readStatus"] = read_status http_headers = {"API_KEY": key, "Content-Type": "application/json"} _bot_loggers["botengine"].info("|_get_messages() params={}".format(params)) r = requests.get( server + "/cloud/json/messages", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_get_messages() response={}".format(j)) return j def _update_message_read_status(server, key, location_id, message_id, read_status): """ Update message read status :param server: The server URL :param key: The user key :param location_id: The location ID :param message_id: The message ID :param read_status: The read status :return: """ _bot_loggers["botengine"].info(">_update_message_read_status()") import requests global _https_proxy params = { "locationId": location_id, "messageId": message_id, "readStatus": read_status, } http_headers = {"API_KEY": key, "Content-Type": "application/json"} _bot_loggers["botengine"].info( "|_update_message_read_status() params={}".format(params) ) r = requests.put( server + "/cloud/json/messageRead", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info( "<_update_message_read_status() response={}".format(j) ) return j def _delete_states(server, user_key, location_id): """ Delete all states from a location in the cloud :param server: :param user_key: :param location_id: :return: """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.delete( server + "/cloud/json/locations/{}/state".format(location_id), headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) def _set_state( server, user_key, location_id, address, json_content, overwrite=True, timestamp_ms=None, publish_to_partner=True, ): """ Set state to the cloud :param server: :param user_key: :param location_id: :param address: :param json_content: :param overwrite: :param timestamp_ms: Timestamp for time-based state variables :param publish_to_partner: True or False to stream this state update to a partner cloud. Default is True, streaming enabled.
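Illustrative call (hypothetical location ID and state name):
    _set_state(server, user_key, 123, "test-state", {"enabled": True})
When timestamp_ms is provided, the value is stored as a time-based state through the /timeStates endpoint instead of /state.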
:return: """ body = {"value": json_content} params = {"name": address, "overwrite": overwrite, "publish": publish_to_partner} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy if timestamp_ms is None: r = requests.put( server + "/cloud/json/locations/{}/state".format(location_id), params=params, data=json.dumps(body), headers=http_headers, proxies=_https_proxy, ) else: params["date"] = timestamp_ms r = requests.put( server + "/cloud/json/locations/{}/timeStates".format(location_id), params=params, data=json.dumps(body), headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) def _set_location_priority(server, bot_key, location_id, priority): """ Set location priority :param server: :param bot_key: :param location_id: :param priority: :return: """ import requests global _https_proxy body = {"location": priority} http_headers = {"ANALYTIC_API_KEY": bot_key, "Content-Type": "application/json"} r = requests.put( server + "/analytic/location/{}".format(location_id), data=json.dumps(body), headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) def _get_narratives( server, key, location_id, row_count=100, narrative_id=None, narrative_time=None, narrative_type=None, scope=None, priority=None, toPriority=None, status=None, event_type=None, search_by=None, start_date_ms=None, end_date_ms=None, page_marker=None, ): """ Get narratives from a location :param server: The server URL :param key: The user key :param location_id: The location ID :param row_count: The number of rows to return :param narrative_id: The narrative ID :param narrative_time: The narrative time :param narrative_type: The narrative type :param scope: The scope. 1 = location, default; 2 = organization, for bots only :param priority: Filter by priority higher or equal than that :param toPriority: Filter by priority lower or equal than that :param status: The status. deleted are not returned by default :param event_type: The event type :param search_by: Filter by title or description. Use * for a wildcard. 
:param start_date_ms: The start date in milliseconds :param end_date_ms: The end date in milliseconds :param page_marker: The page marker :return: Narratives JSON """ _bot_loggers["botengine"].debug(">_get_narratives()") import requests global _https_proxy params = {"rowCount": row_count} if narrative_id is not None: params["narrativeId"] = narrative_id if narrative_time is not None: params["narrativeTime"] = narrative_time if narrative_type is not None: params["narrativeType"] = narrative_type if scope is not None: params["scope"] = scope if priority is not None: params["priority"] = priority if toPriority is not None: params["toPriority"] = toPriority if status is not None: params["status"] = status if event_type is not None: params["eventType"] = event_type if search_by is not None: params["searchBy"] = search_by if start_date_ms is not None: params["startDate"] = start_date_ms if end_date_ms is not None: params["endDate"] = end_date_ms if page_marker is not None: params["pageMarker"] = page_marker http_headers = {"API_KEY": key, "Content-Type": "application/json"} _bot_loggers["botengine"].debug("|_get_narratives() params={}".format(params)) r = requests.get( server + "/cloud/json/locations/{}/narratives".format(location_id), params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].debug("<_get_narratives() response={}".format(j)) return j def _create_or_update_narrative( server, analytic_key, location_id, scope, publish=None, narrative_id=None, narrative_time=None, narrative={}, ): """ Create or Update a Narrative Narrative time is always truncated to seconds by the API. When a new narrative is created, the API returns the new record ID and narrativeTime in milliseconds. To update an existing narrative record both narrativeId and narrativeTime query parameters must be provided. The new value of narrativeTime in milliseconds will be returned, if it has been changed. 
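Illustrative update (hypothetical ID): pass narrative_id=42 together with the narrative_time previously returned by the API to update that record; omit both parameters to create a new narrative.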
:param server: The server URL :param key: The bot analytic api key :param location_id: The location ID :param scope: 1 = location; 2 = organization :param publish: Publish narrative to subscribers, default is true :param narrative_id: ID of narrative - required for update :param narrative_time: narrative time as returned from the API - required for update :param narrative: The narrative content :return: Narratives JSON """ _bot_loggers["botengine"].debug(">_create_or_update_narrative()") import requests global _https_proxy params = {"scope": scope} if publish is not None: params["publish"] = publish if narrative_id is not None: params["narrativeId"] = narrative_id if narrative_time is not None: params["narrativeTime"] = narrative_time body = {"narrative": narrative} http_headers = { "ANALYTIC_API_KEY": analytic_key, "Content-Type": "application/json", } _bot_loggers["botengine"].debug( "|_create_or_update_narrative() params={} data={}".format(params, body) ) r = requests.put( server + "/cloud/json/locations/{}/narratives".format(location_id), params=params, headers=http_headers, proxies=_https_proxy, data=json.dumps(body), ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info( "<_create_or_update_narrative() response={}".format(j) ) return j def _delete_narrative( server, analytic_key, location_id, scope, narrative_id, narrative_time, publish=None, event_type=None, ): """ Delete a narrative :param server: The server URL :param key: The bot analytic api key :param location_id: The location ID :param scope: 1 = location; 2 = organization :param narrative_id: ID of narrative :param narrative_time: narrative time as returned from the API :param publish: Publish narrative to subscribers, default is true :param event_type: The event type :return: Narratives JSON """ _bot_loggers["botengine"].debug(">_delete_narrative()") import requests global _https_proxy params = { "scope": scope, "narrativeId": narrative_id, "narrativeTime": narrative_time, } if publish is not None: params["publish"] = publish if event_type is not None: params["eventType"] = event_type http_headers = { "ANALYTIC_API_KEY": analytic_key, "Content-Type": "application/json", } _bot_loggers["botengine"].debug("|_delete_narrative() params={}".format(params)) r = requests.delete( server + "/cloud/json/locations/{}/narratives".format(location_id), params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].info("<_delete_narrative() response={}".format(j)) return j def _listen( device_server, user_key, bot_instance_id, timeout=10, clean=True, cleanTime=None ): """ Apps running on the developer's local computer will listen to Ensemble for incoming device data. 
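Illustrative call (hypothetical instance ID):
    _listen(device_server, user_key, 12345, timeout=10)
Returns the decoded JSON payload, or an empty dictionary when the request times out or otherwise fails.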
:param device_server: Device Server URL :param user_key: User's /cloud API key :param timeout: HTTP timeout in seconds :param clean: Set to 'true' to avoid having the server repeat itself """ import requests global _https_proxy params = {"appInstanceId": bot_instance_id, "timeout": timeout} if cleanTime: params["cleanTime"] = cleanTime else: params["clean"] = clean http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} try: r = requests.get( device_server + "/deviceio/analytic", params=params, headers=http_headers, proxies=_https_proxy, timeout=timeout, ) j = json.loads(r.text) return j except requests.exceptions.ReadTimeout: return {} # Return an empty dictionary or any default value except requests.exceptions.RequestException as e: # Handle other potential exceptions print(f"Error during request: {e}") return {} # Return an empty dictionary or any default value def _check_for_errors(json_response): """ Check some JSON response for BotEngine errors """ if not json_response: raise BotError("No response from the server!", -1) # Not a PPC cloud response if "resultCode" not in json_response: return # Throw errors for results codes greater than 0 if json_response["resultCode"] > 0: # Map message to json response msg = "Unknown error!" if "resultCodeMessage" in json_response.keys(): msg = json_response["resultCodeMessage"] del json_response["resultCodeMessage"] elif "resultCodeDesc" in json_response.keys(): msg = json_response["resultCodeDesc"] del json_response["resultCodeDesc"] # Include full json response in bot error message if additional keys provided if len(json_response.keys()) > 1: msg += " {}".format(json.dumps(json_response)) raise BotError(msg, json_response["resultCode"]) # Remove response code for simplicity del json_response["resultCode"] def _get_device(server, user_key, device_id, check_connected=False): """Maybe we need this api to obtain the device type and others, or send device Object via command line""" import requests global _https_proxy params = {"checkConnected": check_connected} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/json/devices/" + device_id, params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["device"] def _get_devices_from_location(server, user_key, location_id): """ Get all the devices from your location https://iotapps.docs.apiary.io/reference/devices/manage-devices/get-a-list-of-devices :param server: Server :param user_key: API Key :param location_id: Location ID """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy params = {"locationId": location_id} r = requests.get( server + "/cloud/json/devices", headers=http_headers, params=params, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j.get("devices", []) def _send_command(server, user_key, location_id, device_id, name, value, index=None): """ Send a command to a device :param server: :param user_key: :param location_id: :param device_id: :param name: :param value: :param index: :return: """ command_param = {"name": name, "value": value} if index is not None: command_param["index"] = index body = {"params": [command_param]} http_params = {"locationId": location_id} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.put( server + "/cloud/json/devices/{}/parameters".format(device_id), params=http_params, 
        data=json.dumps(body),
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)


def _get_devices_from_organization(
    server, user_key, organization_id, linked_to=1, device_type=None, user_id=None
):
    """
    Get devices from an organization
    http://docs.iotadmins.apiary.io/#reference/users-and-locations/locations-in-an-organization/get-devices
    :param server: Server
    :param user_key: User Key
    :param linked_to: 1=users; 2=locations; 3=users and locations
    :param organization_id: Organization ID to pull devices from
    :param device_type: Device type to filter by
    :param user_id: User ID to filter by
    """
    import requests

    global _https_proxy
    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    params = {"linkedTo": linked_to}

    if device_type:
        params["deviceType"] = device_type

    if user_id:
        params["userId"] = user_id

    r = requests.get(
        server + "/admin/json/organizations/" + str(organization_id) + "/devices",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)
    return j.get("devices", [])


def _summarize_apps(server, user_key, bundle=None):
    """
    Developer helper method
    This method will print out all the bots in the user's account
    """
    j = _get_apps(server, user_key, bundle)
    bots = j.get("apps", [])
    sys.stderr.write("\n")
    sys.stderr.write(
        Color.BOLD + "BUNDLE".ljust(45) + "STATUS".ljust(30) + "(CATEGORY)" + Color.END
    )
    sys.stderr.write("\n")
    for bot in bots:
        bundle = bot.get("bundle")
        if bundle is None:
            bundle = bot.get("bot", {}).get("bundle")
        version_info = _get_versions(server, user_key, bundle)
        try:
            status = version_info["versions"][0]["status"]
        except Exception as e:
            print(f"Error: {e}")
            status = 0

        sys.stderr.write(
            str(bundle).ljust(45)
            + str(VERSION_STATUS_DICT[status]).ljust(30)
            + "("
            + bot.get("category", "")
            + ")\n"
        )
    sys.stderr.write("\n")


def _summarize_versions(server, user_key, bundle=None, version=None):
    """Developer helper method
    This method will print out all the versions of your bot in the user's account"""
    if not bundle:
        _summarize_apps(server, user_key)
        bundle = input("Which bundle ID should we use?: ")

    j = _get_versions(server, user_key, bundle)
    versions = j.get("versions")
    sys.stderr.write("\n")
    sys.stderr.write("VERSION".ljust(40) + "\t(STATUS - CREATION DATE)\n")
    sys.stderr.write("\n")
    for v in versions:
        status = VERSION_STATUS_DICT.get(v["status"], v["status"])
        sys.stderr.write(
            v["version"].ljust(40) + "\t(" + status + " - " + v["creationDate"] + ")\n"
        )
    sys.stderr.write("\n")


def _summarize_devices(server, user_key, location_id):
    """
    Print all available devices in the given location
    :param server: Server
    :param user_key: User's /cloud API key
    :param location_id: Location ID
    :return: List of devices found at the location
    """
    devices = _get_devices_from_location(server, user_key, location_id)

    if not devices:
        return

    connected = []
    disconnected = []
    relevant_devices = []

    for device in devices:
        if device["connected"] is True:
            relevant_devices.append(device)
            connected.append(
                (
                    device["type"],
                    device["id"].ljust(40)
                    + "\t("
                    + str(device["type"])
                    + " - '"
                    + device.get("desc", "")
                    + "' - Connected)\n",
                )
            )

    sys.stderr.write("\n")

    for device in devices:
        if device["connected"] is False:
            relevant_devices.append(device)
            disconnected.append(
                (
                    device["type"],
                    device["id"].ljust(40)
                    + "\t("
                    + str(device["type"])
                    + " - '"
                    + device.get("desc", "")
                    + "' - Disconnected)\n",
                )
            )

    connected = sorted(connected)
    disconnected = sorted(disconnected)

    sys.stderr.write("\n\nDEVICE ID".ljust(40) + "\t\t(TYPE - NICKNAME - STATUS)\n")
    for device in connected:
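        # Each entry is a (device_type, formatted_line) tuple - the type in element [0]
        # only serves as the sort key above, and element [1] is the printable line.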
sys.stderr.write(str(device[1])) if len(connected) > 0: sys.stderr.write("\n") for device in disconnected: sys.stderr.write(str(device[1])) sys.stderr.write("\n") return relevant_devices def _summarize_device_types(server, user_key): """ Summarize and print out all the device types available on this server """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.get( server + "/cloud/json/deviceTypes", headers=http_headers, proxies=_https_proxy ) device_types = json.loads(r.text) _check_for_errors(device_types) print("\n" + Color.BOLD + "Available Device Types" + Color.END) print("-" * 40) categories = {} for device_type in device_types["deviceTypes"]: category_defined = False if device_type["id"] > 3: if "attributes" in device_type: for attribute in device_type["attributes"]: if "name" in attribute: if attribute["name"] == "category": if int(attribute["value"]) not in categories: categories[int(attribute["value"])] = {} categories[int(attribute["value"])][device_type["id"]] = ( device_type ) category_defined = True if not category_defined: if -1 not in categories: categories[-1] = {} categories[-1][device_type["id"]] = device_type for category_id in sorted(categories): print(Color.BOLD + _device_category_to_string(category_id) + Color.END) for device_type_id in sorted(categories[category_id]): url = "" if "attributes" in categories[category_id][device_type_id]: for attr in categories[category_id][device_type_id]["attributes"]: if "name" in attr: if attr["name"] == "storeUrl": url = "(" + attr["value"] + ")" break print( "\t" + Color.BOLD + str(device_type_id) + Color.END + " : " + categories[category_id][device_type_id]["name"] + url ) print() print("\n" + the_bot() + "Done!") def _get_parameters(server, user_key, parameter=None): """ Get the details of all the public parameters in the system :param server: Server :param user_key: User key :return: API response """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy params = {} if parameter is not None: params = {"paramName": parameter} r = requests.get( server + "/cloud/json/deviceParameters", params=params, headers=http_headers, proxies=_https_proxy, ) parameters = json.loads(r.text) _check_for_errors(parameters) return parameters def _device_category_to_string(category_id): """ :param category_id: Category ID to convert to a string :return: Category description """ if category_id < 0: return "No Category" elif category_id < 50: return "Reserved" elif category_id < 1000: return "Temporary" elif category_id < 2000: return "Administrative Tools" elif category_id < 3000: return "Alarms" elif category_id < 4000: return "Analytics" elif category_id < 5000: return "Appliances" elif category_id < 6000: return "Audio" elif category_id < 7000: return "Cameras" elif category_id < 8000: return "Climate Control" elif category_id < 9000: return "Displays" elif category_id < 10000: return "Environmental" elif category_id < 11000: return "Health" elif category_id < 12000: return "Lighting" elif category_id < 13000: return "Locks" elif category_id < 14000: return "Media" elif category_id < 15000: return "Meters" elif category_id < 16000: return "Perimeter Monitoring" elif category_id < 17000: return "Remote Controls" elif category_id < 18000: return "Robotics" elif category_id < 19000: return "Routers and Gateways" elif category_id < 20000: return "Security" elif category_id < 21000: return "Sensors" elif category_id < 22000: 
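        # Category IDs are allocated in blocks of 1000; this branch maps 21000-21999
        # to "Shades", and the branches below continue the same pattern.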
return "Shades" elif category_id < 23000: return "Social" elif category_id < 24000: return "Switches" elif category_id < 25000: return "Toys" elif category_id < 26000: return "Transportation" elif category_id < 27000: return "Videos" elif category_id < 28000: return "Water" def _botstore_search( server, user_key, search_by=None, categories=None, compatible=None, lang=None, core=None, location_id=None, organization_id=None, object_names=None, limit=None, ): """ Search the bot store for bots :param server: Server to use :param user_key: User's /cloud API key :param searchBy: Search in name, author, keywords, bundle. Use * for a wildcard. :param categories: Category search. i.e. 'S', 'E', etc. Multiple values are allowed and OR-ed. :param compatible: Filter by bots that are compatible with our user account or not, leave blank to return all bots :param lang: Language filter, leave blank to return bots in all languages :param core: Filter by core class :param locationId: Return bots available for this location :param organizationId: Return bots available for this organization :param objectName: Show objects with such name(s). Multiple values are allowed. :param limit: Limit the response size :returns: List of bots matching the search criteria in JSON format """ _bot_loggers["botengine"].debug( ">_botstore_search() searchBy={}, categories={}, compatible={}, lang={}, core={}, locationId={}, organizationId={}, objectName={}, limit={}".format( search_by, categories, compatible, lang, core, location_id, organization_id, object_names, limit, ) ) params = {} if search_by: params["searchBy"] = search_by if categories: params["category"] = categories if compatible: params["compatible"] = compatible if lang: params["lang"] = lang if core: params["core"] = core if location_id: params["locationId"] = location_id if organization_id: params["organizationId"] = organization_id if object_names: params["objectName"] = object_names if limit: params["limit"] = limit http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.get( server + "/cloud/appstore/search", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) _bot_loggers["botengine"].debug("<_botstore_search() response={}".format(j)) return j def _botstore_botinfo(server, user_key, bundle, location_id=None, lang=None): """ View the details of an bot on the bot store :param server: Server to use :param user_key: User's /cloud API key :param bundle: Bundle ID to view :param lang: Optional string. View the bot information in a particular language, i.e. "en", "zh", etc. :returns: Bot information from the bot store in JSON format """ params = {"bundle": bundle} if lang: params["lang"] = lang if location_id: params["locationId"] = location_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.get( server + "/cloud/appstore/appInfo", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["app"] def _botstore_execution_history( server, user_key, bundle, developer=None, app_instance_id=None, flow=None, trigger=1, errors_only=None, start_date=None, end_date=None, row_count=None, sort_order=None, ): """ The API returns the execution history of a bot within specified date range. If 'appInstanceId' is specified, the API returns data for the specific bot instance. 
    If 'bundle' is specified, the API returns data for the public or latest version of the bot, depending on the 'developer' parameter value.

    The result can be filtered by flow, trigger, and result code. The result is always ordered by the execution's request date. The order can be chronological or anti-chronological (newest first).

    The execution history, even filtered by parameters and limited by dates, can be very large. The actual number of execution history events that match the request parameters is displayed in the 'executionsCount' field of the response. The number of execution history events returned never exceeds 500 (the 'executionDisplayed' field of the response). The maximum number of displayed events can be set using the 'rowCount' parameter. By default, rowCount=200.

    :param bundle: Bot bundle ID. The parameter is required if the bot instance ID is not specified.
    :param developer: 'true' means the developer version, 'false' (default) means the public version. The parameter is ignored if the bot instance ID is specified.
    :param app_instance_id: Filter by bot instance ID.
    :param flow: Filter by the execution flow.
    :param trigger: Filter by trigger.
    :param errors_only: If true then only failed executions are returned.
    :param start_date: Execution history start date.
    :param end_date: Execution history end date.
    :param row_count: The maximum number of records to display (default is 200).
    :param sort_order: asc (oldest executions first) or desc (newest executions first).
    :returns: Bot execution JSON
    """
    params = {"bundle": bundle}

    if developer is not None:
        params["developer"] = developer

    if app_instance_id is not None:
        params["appInstanceId"] = app_instance_id

    if flow is not None:
        params["flow"] = flow

    if trigger is not None:
        params["trigger"] = trigger

    if errors_only is not None:
        params["errorsOnly"] = errors_only

    if start_date is not None:
        params["startDate"] = start_date

    if end_date is not None:
        params["endDate"] = end_date

    if row_count is not None:
        params["rowCount"] = row_count

    if sort_order is not None:
        params["sortOrder"] = sort_order

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.get(
        server + "/cloud/developer/executionHistory",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    try:
        j = json.loads(r.text)
        _check_for_errors(j)
        return j
    except Exception as e:
        print("Error: {}".format(e))
        raise BotError(f"Unknown Error {e}", -1)


def _add_bot_to_organization(server, user_key, bundle, organization_id):
    """
    A bot developer proposes to use a bot in a specific organization.
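
    Example (hypothetical bundle and organization ID; a minimal sketch):

        _add_bot_to_organization(server, user_key, "com.ppc.MyBot", 1234)
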
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bundle: Bot bundle ID to propose
    :param organization_id: Organization ID that should be allowed to purchase the bot
    """
    params = {"bundle": bundle}

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.post(
        server + "/cloud/appstore/organizations/" + str(organization_id),
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)


def _approve_bot_for_organization(
    server, user_key, bundle, organization_id, status, development=False
):
    """
    Approve a bot for an organization
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bundle: Bot bundle ID to approve or reject
    :param organization_id: Organization ID the bot was proposed to
    :param status: 1=approve; 2=reject
    :param development: True if this approval is for testing/development purposes
    :return: None
    """
    params = {
        "bundle": bundle,
        "status": status,
        "development": development,
    }

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.put(
        server + "/cloud/appstore/organizations/" + str(organization_id),
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)


def _remove_an_organization(server, user_key, bundle, organization_id):
    """
    Remove a bot from an organization
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bundle: Bot bundle ID to remove
    :param organization_id: Organization ID to remove the bot from
    :return: None
    """
    params = {"bundle": bundle}

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.delete(
        server + "/cloud/appstore/organizations/" + str(organization_id),
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)


def _get_organizations(server, user_key, bundle):
    """
    This API returns all organizations associated with the bot.
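
    Example (hypothetical bundle; the 'organizations' response key is an assumption
    based on the API's naming style):

        response = _get_organizations(server, user_key, "com.ppc.MyBot")
        for org in response.get("organizations", []):
            print(org)
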
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bundle: Bot bundle ID to look up
    :return: List of organizations
    """
    _bot_loggers["botengine"].debug(">_get_organizations() bundle={}".format(bundle))
    params = {"bundle": bundle}

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.get(
        server + "/cloud/appstore/organizations",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)
    _bot_loggers["botengine"].debug("<_get_organizations() response={}".format(j))
    return j


def _botstore_purchasebot(
    server, user_key, bundle, location_id=None, organization_id=None
):
    """Purchase a bot instance into a location or an organization
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bundle: Bot bundle ID to obtain or purchase
    :return: Purchased bot instance ID
    """
    params = {"bundle": bundle}

    if organization_id is not None:
        print(
            "Attempting to purchase the bot "
            + bundle
            + " into organization ID "
            + str(organization_id)
        )
        params["organizationId"] = organization_id

    elif location_id is not None:
        print(
            "Attempting to purchase the bot "
            + bundle
            + " into location ID "
            + str(location_id)
        )
        params["locationId"] = location_id

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.post(
        server + "/cloud/appstore/appInstance",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)

    if "appInstanceId" in j:
        return j["appInstanceId"]
    else:
        return None


def _botstore_configure(
    server, user_key, bot_instance_id, configuration, status, location_id=None
):
    """
    Configure a bot that was purchased on the bot store
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param bot_instance_id: Bot instance ID to configure in the user's account
    :param configuration: Configuration dictionary
    :param status: 0=incomplete; 1=active; 2=inactive
    :returns: True if the bot was configured
    """
    params = {"appInstanceId": bot_instance_id, "status": status}

    if location_id is not None:
        params["locationId"] = location_id

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    if configuration is not None:
        body = json.dumps(configuration)
    else:
        body = None

    # print("Configure params = {}".format(params))
    # print("Configure body = {}".format(body))

    r = requests.put(
        server + "/cloud/appstore/appInstance",
        params=params,
        headers=http_headers,
        data=body,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)

    try:
        _check_for_errors(j)
        return True

    except BotError as e:
        sys.stderr.write(e.msg + "\n\n")
        return False


def _botstore_deletebot(server, user_key, app_instance_id, location_id=None):
    """Delete a bot instance you previously purchased
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param app_instance_id: Bot instance ID to delete
    :return: True if the bot was deleted, False if the bot wasn't found
    """
    params = {"appInstanceId": app_instance_id}

    if location_id is not None:
        params["locationId"] = location_id

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.delete(
        server + "/cloud/appstore/appInstance",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    # print("Delete Response: " + json.dumps(j, indent=2, sort_keys=True))
    return j["resultCode"] == 0


def _botstore_mybots(
    server, user_key, app_instance_id=None, location_id=None, organization_id=None
):
    """
    Get a list of the bots you've purchased
    https://iotbots.docs.apiary.io/reference/end-user-bot-shop-apis/bot-purchases/get-bot-instances
    :param server: Server to use
    :param user_key: User's /cloud API key
    :param app_instance_id: Optional ID of a specific bot to obtain information about
    :param organization_id: Optional Organization ID
    :param location_id: Optional Location ID
    :returns: Bot information on your purchased bots, in JSON format
    """
    params = {}

    if app_instance_id:
        params["appInstanceId"] = app_instance_id

    if organization_id is not None:
        params["organizationId"] = organization_id

    if location_id is not None:
        params["locationId"] = location_id

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.get(
        server + "/cloud/appstore/appInstance",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    # print("\nRESPONSE: {}\n".format(json.dumps(j, indent=2, sort_keys=True)))
    _check_for_errors(j)
    return j.get("bots", j.get("apps"))


def get_editable_bot_configuration(current_app_configuration):
    """Get an editable bot configuration
    :param current_app_configuration: Full JSON Dictionary definition of the bot instance from the server - not an array
    :returns: Editable configuration
    """
    config = current_app_configuration
    editable = {"app": {}}

    try:
        editable["app"]["access"] = config["access"]
    except KeyError as e:
        print(f"Access Not Found: {e}")

    try:
        editable["app"]["communications"] = config["communications"]
    except KeyError as e:
        print(f"Communications Not Found: {e}")

    try:
        editable["app"]["nickname"] = config["nickname"]
    except KeyError as e:
        print(f"Nickname Not Found: {e}")

    try:
        editable["app"]["timezone"] = config["timezone"]
    except KeyError as e:
        print(f"Timezone Not Found: {e}")

    return editable


def _get_questions(
    server,
    user_key,
    instance_id=None,
    question_id=None,
    answer_status=None,
    location_id=None,
    organization_id=None,
    collection_name=None,
    general_public=None,
    language=None,
):
    """
    Get questions for the specific bot instance ID
    :param server: Server instance
    :param user_key: API key
    :param instance_id: Optional Bot instance ID to obtain questions from - if left blank, all the user's questions will be returned
    :param question_id: Optional specific question ID to retrieve
    :param answer_status: Return questions with a specific answer status. By default, questions with statuses 2 and 3 are returned. Multiple values are supported.
    :param location_id: Location ID
    :param organization_id: Filter by organization ID
    :param collection_name: Filter by collection name
    :param general_public: True to return only public questions, False to return private questions
    :param language: Questions text language to return
    """
    params = {}

    if instance_id is not None:
        params["appInstanceId"] = instance_id

    if question_id is not None:
        params["questionId"] = question_id

    if answer_status is not None:
        params["answerStatus"] = answer_status

    if location_id is not None:
        params["locationId"] = location_id

    if organization_id is not None:
        params["organizationId"] = organization_id

    if collection_name is not None:
        params["collectionName"] = collection_name

    if language is not None:
        params["lang"] = language

    if general_public is not None:
        params["generalPublic"] = general_public

    # print("GET QUESTIONS PARAMS: " + str(params))
    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.get(
        server + "/cloud/json/questions",
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)
    _bot_loggers["botengine"].debug(
        "|_get_questions() response: {}".format(json.dumps(j, indent=2, sort_keys=True))
    )
    # ===========================================================================
    # print("GET QUESTIONS RESPONSE:\n" + json.dumps(j, indent=2, sort_keys=True))
    #
    # print("[")
    # if 'questions' in j:
    #     for question_block in j['questions']:
    #         print(str(question_block['id']) + ", #" + str(question_block['key']))
    # print("]")
    # ===========================================================================
    return j


def _print_question(question):
    """
    Print out a question
    """
    print("\n\n" + Color.UNDERLINE + question["question"] + Color.END)

    display_type = 0
    if "displayType" in question:
        display_type = question["displayType"]

    default_answer = None
    if "defaultAnswer" in question:
        default_answer = question["defaultAnswer"]

    response_options = None
    if "responseOptions" in question:
        response_options = question["responseOptions"]

    answer_status = question["answerStatus"]

    if question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_BOOLEAN:
        if display_type == BotEngine.QUESTION_DISPLAY_BOOLEAN_ONOFF:
            print("\t=> This is a boolean ON/OFF switch question (0 = OFF; 1 = ON)")
            if answer_status == 4:
                print("\t=> User selected answer is " + str(question["answer"]))
            elif default_answer:
                print("\t=> Current answer is " + str(default_answer))
            else:
                print("\t=> Current answer is 0")
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_BOOLEAN_YESNO:
            print("\t=> This is a boolean YES/NO question (0 = NO; 1 = YES)")
            if answer_status == 4:
                print("\t=> User selected answer is " + str(question["answer"]))
            elif default_answer:
                print("\t=> Current answer is " + str(default_answer))
            else:
                print("\t=> Current answer is 0")
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_BOOLEAN_BUTTON:
            print("\t=> This is a single button question. (1 = tap the button).")
            if "placeholder" in question:
                print(
                    "\t=> The button's text says '" + str(question["placeholder"]) + "'"
                )
            else:
                print("\t=> The button's text says 'Yes'")
            pass

        else:
            print(
                "\t=> Unknown display type ("
                + str(display_type)
                + ") for this Boolean question"
            )
            pass

    elif (
        question["responseType"]
        == BotEngine.QUESTION_RESPONSE_TYPE_MULTICHOICE_SINGLESELECT
    ):
        if display_type == BotEngine.QUESTION_DISPLAY_MCSS_RADIO_BUTTONS:
            print(
                "\t=> This is a Multiple Choice Single Select question with Radio buttons."
            )
            print(
                "\t=> The answer will be the ID of the response option you want to select."
            )
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_MCSS_PICKER:
            print(
                "\t=> This is a Multiple Choice Single Select question with a Picker."
            )
            print(
                "\t=> The answer will be the ID of the response option you want to select."
            )
            pass

        else:
            print(
                "\t=> Unknown display type ("
                + str(display_type)
                + ") for this Multiple Choice Single Select question"
            )

        if answer_status == 4:
            print("\t=> User selected answer is " + str(question["answer"]))
        elif default_answer:
            print("\t=> The answer is currently: '" + str(default_answer) + "'")
        else:
            # 0 = nothing selected
            question["defaultAnswer"] = 0
            print("\t=> The answer is currently: '0'")

        # Print response options
        if not response_options or len(response_options) == 0:
            print(
                Color.RED
                + "\t=> This question is incomplete because it is missing response options."
                + Color.END
            )
        else:
            # First, sort the response options by ID
            responses = {}
            for o in question["responseOptions"]:
                responses[o["id"]] = o

            print("\n\n\tOptions:")

            # Then print the response options
            for response_id in sorted(responses.keys()):
                print(
                    "\t["
                    + Color.GREEN
                    + str(response_id)
                    + Color.END
                    + "] : "
                    + str(responses[response_id]["text"])
                )
        pass

    elif (
        question["responseType"]
        == BotEngine.QUESTION_RESPONSE_TYPE_MULTICHOICE_MULTISELECT
    ):
        print("\t=> This is a Multiple Choice Multiple Select question (checkboxes).")
        print(
            "\t=> Provide a BITMASK of the response options you want to select - either a decimal value, or hex value with a 0x prefix."
        )

        if answer_status == 4:
            print("\t=> User selected answer is " + str(question["answer"]))
        elif default_answer:
            print("\t=> The answer is currently: '" + str(default_answer) + "'")
        else:
            # 0 = nothing selected
            question["defaultAnswer"] = 0
            print("\t=> The answer is currently: '0'")

        # Print response options
        if not response_options or len(response_options) == 0:
            print(
                Color.RED
                + "\t=> This question is incomplete because it is missing response options."
                + Color.END
            )
        else:
            # First, sort the response options by ID
            responses = {}
            for o in question["responseOptions"]:
                responses[o["id"]] = o

            print("\n\n\tOptions:")

            # Then print the response options
            for response_id in sorted(responses.keys()):
                print(
                    "\t["
                    + Color.GREEN
                    + str(response_id)
                    + Color.END
                    + "] : "
                    + str(responses[response_id]["text"])
                )
        pass

    elif question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_DAYOFWEEK:
        if display_type == BotEngine.QUESTION_DISPLAY_DAYOFWEEK_MULTISELECT:
            print("\t=> This is a Multiple Choice Day of the Week question.")
            print(
                "\t=> Provide a BITMASK-OR of the days you want to select - either a decimal value, or hex value with a 0x prefix."
            )
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_DAYOFWEEK_SINGLESELECT:
            print("\t=> This is a Single Choice Day of the Week question.")
            print("\t=> Provide the ID of the day you want to select.")
            pass

        else:
            print(
                "\t=> Unknown display type ("
                + str(display_type)
                + ") for this Day of the Week question"
            )
            pass

        if answer_status == 4:
            print("\t=> User selected answer is " + str(question["answer"]))
        elif default_answer:
            print("\t=> The answer is currently: '" + str(default_answer) + "'")
        else:
            # 0 = nothing selected
            question["defaultAnswer"] = 0
            print("\t=> The answer is currently: '0'")

        print("\n\n\tOptions:")
        print("\t[" + Color.GREEN + "1" + Color.END + "] : Sunday")
        print("\t[" + Color.GREEN + "2" + Color.END + "] : Monday")
        print("\t[" + Color.GREEN + "4" + Color.END + "] : Tuesday")
        print("\t[" + Color.GREEN + "8" + Color.END + "] : Wednesday")
        print("\t[" + Color.GREEN + "16" + Color.END + "] : Thursday")
        print("\t[" + Color.GREEN + "32" + Color.END + "] : Friday")
        print("\t[" + Color.GREEN + "64" + Color.END + "] : Saturday")

    elif question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_SLIDER:
        # Slider questions should always have min, max, and inc defined
        slider_min = None
        slider_max = None
        slider_inc = None
        units = None

        if "sliderMin" in question:
            slider_min = question["sliderMin"]

        if "sliderMax" in question:
            slider_max = question["sliderMax"]

        if "sliderInc" in question:
            slider_inc = question["sliderInc"]

        if "placeholder" in question:
            units = str(question["placeholder"])

        print("\t=> This is a Slider question.")
        if units:
            print("\t=> Description of the units of measurement: " + str(units))

        print(
            "\t=> Answer is a number between "
            + str(slider_min)
            + " and "
            + str(slider_max)
            + " in increments of "
            + str(slider_inc)
            + "."
        )

        if display_type == BotEngine.QUESTION_DISPLAY_SLIDER_INTEGER:
            print("\t=> Answer will be an integer.")
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_SLIDER_FLOAT:
            print("\t=> Answer will be a floating point number.")
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_SLIDER_MINSEC:
            print("\t=> Answer will be in integer seconds.")
            pass

        else:
            print(
                "\t=> Unknown display type ("
                + str(display_type)
                + ") for this Slider question"
            )

        if answer_status == 4:
            print("\t=> User selected answer is " + str(question["answer"]))
        elif default_answer:
            print("\t=> The answer is currently: '" + str(default_answer) + "'")
        else:
            # Default to the midpoint of the slider's range
            question["defaultAnswer"] = (slider_min + slider_max) / 2
            print(
                "\t=> The answer is currently: '"
                + str(question["defaultAnswer"])
                + "'"
            )
        pass

    elif question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_TIME:
        if display_type == BotEngine.QUESTION_DISPLAY_TIME_HOURS_MINUTES_SECONDS_AMPM:
            print("\t=> This is a Time Since Midnight question, down to the second.")
            print("\t=> Answer will be in seconds since midnight.")
            pass

        elif display_type == BotEngine.QUESTION_DISPLAY_TIME_HOURS_MINUTES_AMPM:
            print("\t=> This is a Time Since Midnight question, down to the minute.")
            print(
                "\t=> Answer will be in seconds since midnight, rounded to the nearest whole minute."
) pass else: print( "\t=> Unknown display type (" + str(display_type) + ") for this Time question" ) if answer_status == 4: print("\t=> User selected answer is " + str(question["answer"])) elif default_answer: print("\t=> The answer is currently: '" + str(default_answer) + "'") else: # 43200 seconds is 12:00:00 PM question["defaultAnswer"] = 43200 print("\t=> The answer is currently: '43200'") pass elif question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_DATETIME: if display_type == BotEngine.QUESTION_DISPLAY_DATETIME_DATE_AND_TIME: print("\t=> This is a Datetime question, including both date and time.") print("\t=> Answer will be of the form: YYYY-MM-DDThh:mm:ss[Z|(+|-)hh:mm]") if answer_status == 4: print("\t=> User selected answer is " + str(question["answer"])) elif default_answer: print("\t=> The answer is currently: '" + default_answer + "'") else: import pytz question["defaultAnswer"] = datetime.datetime.now(pytz.utc).isoformat() print("\t=> The answer is currently: " + question["defaultAnswer"]) pass elif display_type == BotEngine.QUESTION_DISPLAY_DATETIME_DATE: print("\t=> This is a Datetime question, with just the date and no time.") print("\t=> Answer will be of the form: YYYY-MM-DD") if answer_status == 4: print("\t=> User selected answer is " + str(question["answer"])) elif default_answer: print("\t=> The answer is currently: '" + default_answer + "'") else: question["defaultAnswer"] = datetime.datetime.today().strftime( "%Y-%m-%d" ) print("\t=> The answer is currently: " + question["defaultAnswer"]) pass else: print( "\t=> Unknown display type (" + str(display_type) + ") for this Datetime question" ) pass elif question["responseType"] == BotEngine.QUESTION_RESPONSE_TYPE_TEXT: print("\t=> This is an open-ended Text question.") if "placeholder" in question: print( "\t=> The placeholder in the text box says: '" + str(question["placeholder"]) + "'" ) if answer_status == 4: print("\t=> User selected answer is '" + str(question["answer"]) + "'") elif default_answer: print("\t=> The answer is currently: '" + default_answer + "'") else: print("\t=> The text box is blank.") print("\t=> Type any string response you'd like.") pass if "deviceId" in question: print("\t=> deviceId is " + str(question["deviceId"])) if "icon" in question: print("\t=> icon is " + str(question["icon"])) def _answer_question(server, user_key, question, answer, location_id=None): """ Answer a question :param server: Server :param user_key: User Key :param question: Question JSON from the _get_questions API :param answer: Raw answer to provide """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} params = None if location_id is not None: params = {"locationId": location_id} body = {"questions": [{"id": question["id"], "answer": answer}]} import requests global _https_proxy r = requests.put( server + "/cloud/json/questions", headers=http_headers, data=json.dumps(body), params=params, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) def _playback_logger_timestamp(self, record, datefmt=None): """ Logger playback timestamp override :param record: :param datefmt: :return: """ global playback_timestamp_ms global playback_timezone import pytz return datetime.datetime.fromtimestamp( playback_timestamp_ms / 1000.0, pytz.timezone(playback_timezone) ) def _create_logger( name, level, console_mode=False, filename=None, playback=False, session_id=None ): """ Create a logger :param name: :param level: :param console_mode: :param filename: :param playback: :return: """ if 
playback: logging.Formatter.formatTime = _playback_logger_timestamp if session_id is not None: filename = "playback_{}_log.txt".format(session_id) else: filename = "playback_log.txt" logger = logging.getLogger(name) if _bot_loggers and len(name.split(".")) > 0: components = name.split(".") for i in range(len(components)): # Check if the logger is already configured by referencing prefixes if ".".join(components[: i + 1]) in _bot_loggers: return logger logger.setLevel(level) fmt = logging.Formatter("%(asctime)s %(levelname)-8s %(name)-12s %(message)s") if filename is not None: if session_id is None: try: os.remove(filename) except Exception as e: print(f"Error removing file {filename}: {e}") pass f = logging.FileHandler(filename, encoding='utf-8') f.setFormatter(fmt) logger.addHandler(f) if console_mode: stream = sys.stdout try: # Python 3.7+ allows reconfiguring stdout encoding if hasattr(stream, 'reconfigure'): stream.reconfigure(encoding='utf-8') except Exception as e: # Just in case reconfigure fails, silently fallback pass console = logging.StreamHandler(stream) console.setFormatter(fmt) logger.addHandler(console) if not console_mode and not filename: logger.addHandler(logging.NullHandler()) return logger # =============================================================================== # Logging # =============================================================================== def set_cloudwatch_logging( server, user_key: str, app_instance_id: int, execution_flow: int, logging_status: int, logging_end_date_ms: int = None, ): """ Enable/disable logging in AWS CloudWatch for a specific bot instance and flow. * Developer can enable logging for their bot instance at any time and for any period of time. * Developer can disable logging for their bot instance at any time. While logging is enabled, the bot's input contains two additional parameters: `cloudWatchLogGroup` and `cloudWatchLogStream` :param server: Server to use :param user_key: str User key :param app_instance_id: int App instance ID :param execution_flow: int Execution flow. Flow=0 stands for regular triggers, flow=1 stands for data requests (ML). :param logging_status: int Logging status. 1 to enable, 2 to disable. :param logging_end_date_ms: int Logging end date. Logging end date in milliseconds. Required if status=1. :return: None """ if logging_status == 1: if logging_end_date_ms is None: raise BotError("Logging end date required for status=1", -1) import requests global _https_proxy params = { "appInstanceId": app_instance_id, "flow": execution_flow, "status": logging_status, } if logging_end_date_ms is not None: params["endDate"] = BotEngine._strftimestamp(int(logging_end_date_ms) / 1000) http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.put( server + "/cloud/developer/cloudwatchlog", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return None def get_cloudwatch_log_info( server, user_key: str, app_instance_id: int, execution_flow: int ): """ Description of the bot instance log stream in AWS CloudWatch. :param server: Server to use :param user_key: str User key :param app_instance_id: int App instance ID :param execution_flow: int Execution flow. Flow=0 stands for regular triggers, flow=1 stands for data requests (ML). :return: dict Cloudwatch log info | Attribute | Description | Format | | --------- | ----------- | ------ | | groupName | AWS CloudWatch Log Group name. The log group is common for all instances of the bot. 
| /Bots/cloudIndex/appId. | | streamName | AWS CloudWatch Log Stream name. Each bot instance is assigned an individual log stream. | Id=appInstanceId;flow=flow | | retentionInDays | The log group's retention setting. | | | endDate | Datetime after which logging automatically stops. | | | lastEventDate | The datetime of the most recent log event in the log stream. | | | status | 1 if logging is on, or 2 if logging is off. | | """ import requests global _https_proxy params = {"appInstanceId": app_instance_id, "flow": execution_flow} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/cloudwatchlog", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["cloudWatchLog"] def create_an_export_task( server, user_key: str, app_instance_id: int, execution_flow: int, start_date_ms: int, end_date_ms: int = None, ): """ Create an export task Create the export task to export the log stream. https://iotbots.docs.apiary.io/#/reference/developer-bot-lab-ap-is/bot-instance-log-export/create-an-export-task { "resultCode": 0, "logExport": { "appInstanceId": 12345, "bundle": "com.ppc.SomeBot", "startDate": "2023-12-17T12:34:00", "endDate": "2023-12-18T12:34:00", "statusCode": "SUBMITTED", "taskId": "19efa208-4f61-49ba-8899-6edd8b4b3006" } } :param app_instance_id: Bot instance ID :param flow: Execution flow :param start_date_ms: Start date of log events to export :param end_date_ms: End date of log events to export :return: """ import requests global _https_proxy params = { "appInstanceId": app_instance_id, "flow": execution_flow, "startDate": start_date_ms, } if end_date_ms is not None: params["endDate"] = end_date_ms http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.post( server + "/cloud/developer/cloudwatchlog/export", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["logExport"] def get_export_status(server, user_key: str, task_id: int): """ Get Export Status Inspect the status of an export task and obtain the pre-signed URL(s) to the exported files. https://iotbots.docs.apiary.io/#/reference/developer-bot-lab-ap-is/bot-instance-log-export/get-export-status A large log stream can be exported to multiple files. The export files are gziped. Possible status codes are: - CANCELLED - COMPLETED - FAILED - PENDING - PENDING_CANCEL - RUNNING { "resultCode": 0, "logExport": { "appInstanceId": 12345, "bundle": "com.ppc.SomeBot", "startDate": "2023-12-17T12:34:00", "endDate": "2023-12-18T12:34:00", "statusCode": "COMPLETED", "taskId": "19efa208-4f61-49ba-8899-6edd8b4b3006", "files": [ ... 
] } } :param task_id: An export task ID :return: 200 OK """ import requests global _https_proxy params = {"taskId": task_id} http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} r = requests.get( server + "/cloud/developer/cloudwatchlog/export", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j["logExport"] def _download_device_property( server, user_key, location_id, device_id, name=None, index=None ): """ Get device properties from your location https://iotapps.docs.apiary.io/#reference/devices/device-properties/get-device-properties :param device_id: Device ID :param name: Optional name to search for :param index: Optional index to search for """ params = {"locationId": location_id} if name is not None: params["name"] = name if index is not None: params["index"] = index http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.get( server + "/cloud/json/devices/{}/properties".format(device_id), params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) if "properties" in j: return j["properties"] return [] def _download_only_get_newest_measurements( server, user_key, device_id, location_id=None, parameter_names=[], user_id=None ): """ This method will return the newest measurements from the given device @param device_id: Device ID @param parameter_names: Optional List of parameter name strings for which to download values """ params = {"paramName": parameter_names} if location_id is not None: params["locationId"] = location_id if user_id: params["userId"] = user_id http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} import requests global _https_proxy r = requests.get( server + "/cloud/json/devices/" + device_id + "/parameters", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j def _download_only_get_historical_measurements( server, user_key, device_id, start_date, end_date=None, parameter_names=[], parameter_index=None, aggregation_type=None, aggregation_interval=None, sort_collection=None, sort_by=None, row_count=None, sort_order=None, first_row=None, location_id=None, user_id=None, ): """ Split the call to download historical measurements into multiple 1-week calls :param server: :param user_key: :param device_id: :param start_date: :param end_date: :param parameter_names: :param parameter_index: :param aggregation_type: :param aggregation_interval: :param sort_collection: :param sort_by: :param row_count: :param sort_order: :param first_row: :param location_id: :param user_id: :return: """ import dateutil.relativedelta if end_date is None: end_date = datetime.datetime.now() start_date = datetime.datetime.combine(start_date, datetime.datetime.min.time()) response = {"readings": []} sys.stdout.write("\n\t+ Downloading ...") sys.stdout.flush() while start_date < end_date: focused_end_date = start_date + dateutil.relativedelta.relativedelta(days=7) if focused_end_date > end_date: focused_end_date = end_date sys.stdout.write(".") sys.stdout.flush() time.sleep(10) while True: try: data = _download_only_get_historical_measurements_core( server, user_key, device_id, start_date, focused_end_date, parameter_names, parameter_index, aggregation_type, aggregation_interval, sort_collection, sort_by, row_count, sort_order, first_row, location_id, user_id, ) break except BotError as e: print(str(e.msg)) # Ignore "Cannot 
complete this in a reasonable amount of time" errors
                continue

        if "readings" in data:
            response["readings"] += data["readings"]

        start_date = focused_end_date

    print("\n")
    return response


def _download_only_get_historical_measurements_core(
    server,
    user_key,
    device_id,
    start_date,
    end_date=None,
    parameter_names=[],
    parameter_index=None,
    aggregation_type=None,
    aggregation_interval=None,
    sort_collection=None,
    sort_by=None,
    row_count=None,
    sort_order=None,
    first_row=None,
    location_id=None,
    user_id=None,
):
    """
    This method will return historical measurements from the given device
    :param device_id: The exact device ID to download measurements from. This is case-sensitive.
    :param start_date: startDate datetime
    :param end_date: Optional endDate datetime
    :param parameter_names: Optional List of parameter names for which to download values
    :param parameter_index: Optional parameter index
    :param aggregation_type: Optional aggregation type, 8 = 'warm' data with 15 minute intervals (default); 9 = 'hot' data recently received from the device
    :param aggregation_interval: Optional, show readings aggregated by this interval
    :param sort_collection: Sort by a collection name (list name) which needs to be ordered or limited
    :param sort_by: The collection element property used for element comparisons in ordering, like 'timeStamp'
    :param sort_order: asc = Ascending; desc = Descending
    :param row_count: The number of collection elements to be returned
    :param first_row: The index of the first collection element to be returned, starting from zero. If it is not specified, the last rowCount elements will be returned
    :param user_id: User ID for administrators
    """
    params = {}

    if end_date is not None:
        params["endDate"] = end_date.strftime("%Y-%m-%dT%H:%M:%S")

    if len(parameter_names) > 0:
        params["paramName"] = parameter_names

    if parameter_index is not None:
        params["index"] = parameter_index

    if aggregation_type is not None:
        params["aggregation"] = aggregation_type

    if aggregation_interval is not None:
        params["interval"] = aggregation_interval

    if sort_collection is not None:
        params["sortCollection"] = sort_collection

    if sort_by is not None:
        params["sortBy"] = sort_by

    if row_count is not None:
        params["rowCount"] = row_count

    if sort_order is not None:
        params["sortOrder"] = sort_order

    if first_row is not None:
        params["firstRow"] = first_row

    if user_id is not None:
        params["userId"] = user_id

    if location_id is not None:
        params["locationId"] = location_id

    http_headers = {"API_KEY": user_key, "Content-Type": "application/json"}

    import requests

    global _https_proxy
    r = requests.get(
        "{}/cloud/json/devices/{}/parametersByDate/{}".format(
            server, device_id, start_date.strftime("%Y-%m-%dT%H:%M:%S-00:00")
        ),
        params=params,
        headers=http_headers,
        proxies=_https_proxy,
    )
    j = json.loads(r.text)
    _check_for_errors(j)
    return j


def _download_modes_history_to_csv(
    server,
    user_key,
    location_id,
    start_date,
    oldest_first=False,
    destination_directory=None,
):
    """
    Download a history of this user's modes to CSV files
    :param server: Server to download from
    :param user_key: User key to download from
    :param location_id: User's location ID
    :param start_date: Datetime to start collecting data
    :param oldest_first: True to write the .csv file with oldest events first
    :param destination_directory: Destination directory
    :return: Filename
    """
    print("\t+ Downloading ...")
    data = _download_modes_history_data(server, user_key, location_id, start_date)

    filename = "location_" + str(location_id) + "_modes_history.csv"

    if destination_directory is not None:
        filename =
destination_directory + os.sep + filename f = open(filename, "w") f.write("trigger,timestamp_ms,location_id,event,source_type,source_agent\n") if oldest_first: data["events"] = list(reversed(data["events"])) if "events" in data: for event in data["events"]: source = None if "sourceAgent" in event: source = event["sourceAgent"].replace(",", "_") f.write( "2," + str(event["eventDateMs"]) + "," + str(location_id) + "," + str(event["event"]) + "," + str(event["sourceType"]) + "," + str(source) + "\n" ) f.close() print("\tSaved to: " + filename + "\n") return filename def _download_modes_history_data(server, user_key, location_id, start_date): """ Download a history of this user's modes to CSV files :param server: Server to download from :param user_key: User key to download from :param location_id: User's location ID :param start_date: Datetime to start collecting data :return: data """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} params = {"startDate": start_date.strftime("%Y-%m-%dT%H:%M:%S-00:00")} import requests global _https_proxy r = requests.get( server + "/cloud/json/location/" + str(location_id) + "/events", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) try: _check_for_errors(j) except BotError as e: print(r.text) raise e return j def _downloaded_data_to_csv( server, user_key, start_date, device_id, device_type, device_name, behavior, location_id=None, user_id=None, oldest_first=False, destination_directory=None, ): """ Download device data to .csv files :param server: server to connect with :param user_key: user key to download (could be an admin key) :param start_date: Datetime start date :param device_id: Download a single device - device ID to download :param device_type: Download all devices matching the type :param device_name: Name of the device :param behavior: Goal ID of this device :param user_id: User ID to download from if we're downloading as an administrator :param oldest_first: True to write the oldest records first to the .csv file :param destination_directory: Default is the current working directory, otherwise put it in this relative directory :return: List of filenames created """ titles = set([]) last_measurements = {} filenames = [] try: data = _download_only_get_newest_measurements( server, user_key, device_id, location_id=location_id, user_id=user_id ) except Exception as e: print(f"Error downloading data for device {device_id}: {e}") return [] try: device = data["devices"][0] except Exception as e: print(f"No data for device {device_id}: {e}") return [] for p in device["parameters"]: if "name" in p and "value" in p: titles.add(p["name"]) last_measurements[p["name"]] = p["value"] titles = sorted(titles) output = "trigger,device_type,device_id,description,timestamp_ms,timestamp_iso,timestamp_excel,behavior," for t in titles: output = output + t + "," output = output + "\n" readings = _download_only_get_historical_measurements( server, user_key, device_id, start_date, location_id=location_id, user_id=user_id, ) if "readings" not in readings: print("\t- No readings\n") else: print("\t+ Processing " + str(len(readings["readings"])) + " readings ...") device_name = device_name.strip() original_device_name = device_name import re device_name = re.sub("[^0-9a-zA-Z]+", "-", device_name) if device_type is not None: filename = str(device_type) + "_" + device_id + "_" + device_name + ".csv" else: filename = device_id + "_" + device_name + ".csv" if user_id is not None: filename = ( str(device_type) + "_user" 
+ str(user_id) + "_" + device_id + "_" + device_name.strip().replace(" ", "-") + ".csv" ) if destination_directory is not None: filename = destination_directory + os.sep + filename filenames.append(filename) f = open(filename, "w") f.write(output) if oldest_first: readings["readings"] = list(reversed(readings["readings"])) for r in readings["readings"]: ts = datetime.datetime.fromtimestamp( (int(r["timeStampMs"]) / 1000.0) ).strftime("%m/%d/%Y %H:%M:%S") line = ( "8," + str(device_type) + "," + str(device_id) + "," + str(original_device_name) + "," + str(r["timeStampMs"]) + "," + str(r["timeStamp"]) + "," + ts + "," + str(behavior) + "," ) focused_dict = {} for p in r["params"]: if "value" in p: focused_dict[p["name"]] = str(p["value"]) last_measurements[p["name"]] = str(p["value"]) for t in titles: if t in focused_dict: line = ( line + focused_dict[t].replace( ",", COMMA_DELIMITER_REPLACEMENT_CHARACTER ) + "," ) else: line = ( line + last_measurements[t].replace( ",", COMMA_DELIMITER_REPLACEMENT_CHARACTER ) + "," ) f.write(line + "\n") f.close() print("\tSaved to: " + filename + "\n") return filenames def _get_bot_errors(server, user_key, bundle, errors_only=True, developer=False): """ Get a list of errors that all users have experienced running your bot :param server: Server to use :param user_key: Developer user key :param bundle: Bot bundle ID :param errors_only: True to get errors only, False to get all logs (default is True) :return: JSON structure with all the errors """ http_headers = {"API_KEY": user_key, "Content-Type": "application/json"} params = {"failuresOnly": errors_only, "bundle": bundle, "developer": developer} import requests global _https_proxy r = requests.get( server + "/cloud/developer/logs", params=params, headers=http_headers, proxies=_https_proxy, ) j = json.loads(r.text) _check_for_errors(j) return j # cloud/developer/execution/logs?getErrors=true&bundle=com.ppc.ProSecurity def the_bot(): """ :return: the bot 🤖 """ return "🤖 " def cli_progress_bar( iteration, total, prefix="", suffix="", decimals=1, length=100, fill="█" ): """ Quick copy/paste from https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console Call in a loop to create terminal progress bar @params: iteration - Required : current iteration (Int) total - Required : total iterations (Int) prefix - Optional : prefix string (Str) suffix - Optional : suffix string (Str) decimals - Optional : positive number of decimals in percent complete (Int) length - Optional : character length of bar (Int) fill - Optional : bar fill character (Str) """ percent = ("{0:." 
    + str(decimals) + "f}").format(100 * (iteration / float(total)))
    filledLength = int(length * iteration // total)
    bar = fill * filledLength + "-" * (length - filledLength)
    print("\r%s |%s| %s%% %s" % (prefix, bar, percent, suffix), end="\r")
    # Print a new line upon completion
    if iteration == total:
        print()


# ===============================================================================
# Run Methods
# ===============================================================================
def _run_locally_forever(bot_server, device_server, user_key, bot, bot_instance_id):
    """Run the bot locally forever, using HTTP long polling to listen for real-time streaming data from the server
    :param bot_server: Application server for bots
    :param device_server: Device Server URL
    :param user_key: User's /cloud API key
    :param bot: The imported bot module to call the run method within
    :param bot_instance_id: The specific bot instance ID we're executing against on the server
    """
    import traceback

    clean_time = None
    local_execution_count = 0
    while True:
        try:
            inputs = _listen(
                device_server,
                user_key,
                bot_instance_id,
                cleanTime=clean_time,
                clean=False,
            )

            if inputs:
                # print("BOTENGINE INPUTS: " + json.dumps(inputs, indent=2, sort_keys=True))
                if "apiKey" in inputs:
                    try:
                        _botengine = _run(
                            bot,
                            inputs,
                            _bot_loggers["botengine"],
                            server_override=bot_server,
                            local=True,
                            local_execution_count=local_execution_count,
                        )
                        local_execution_count += 1
                        # _bot_loggers["botengine"].debug("BotEngine Statistics: " + json.dumps(bot.get_intelligence_statistics(_botengine), sort_keys=True) if hasattr(bot, "get_intelligence_statistics") else {})
                    except Exception as e:
                        print("Bot Execution Exception: \n", e)
                        traceback.print_exc()

                try:
                    # Grab the last input out of the array and cite its timestamp on the next API call
                    clean_time = int(
                        inputs["inputs"][len(inputs["inputs"]) - 1]["time"]
                    )
                except Exception:
                    clean_time = None

        except Exception as e:
            print(f"General Bot Exception: [{e}] \n{traceback.format_exc()}")


def _run(
    bot,
    inputs,
    logger,
    context=None,
    server_override=None,
    botengine_override=None,
    local=False,
    playback=False,
    local_execution_count=None,
):
    """
    Run the given bot with the given parameters
    :param bot: bot to run
    :param inputs: the input JSON from the bot server
    :param logger: logger object
    :param server_override: Override the server URL with the known server when executing on someone's computer
    :param botengine_override: For playback simulators, override the botengine object
    :return botengine: BotEngine object
    """
    global _bot_loggers
    if _bot_loggers is None:
        _bot_loggers = {}
        _bot_loggers["botengine"] = logger

    _bot_loggers["botengine"].debug(
        ">_run() " + Color.RED + "BotEngine Raw Inputs: {}".format(inputs) + Color.END
    )

    next_timer_at_server = None
    if botengine_override is None:
        services = None
        if "services" in inputs:
            services = inputs["services"]

        count = None
        if "count" in inputs:
            count = int(inputs["count"])

        if "timer" in inputs:
            next_timer_at_server = int(inputs["timer"])

        cloud = None
        if "cloud" in inputs:
            cloud = inputs["cloud"]

        # Determine the primary location of execution: on the edge or in the cloud.
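        # For context, a hypothetical cloud-originated payload looks roughly like
        #   {"id": 12345, "time": ..., "count": 3, "inputs": [...]}
        # while an edge-originated payload also carries a non-None "edgeProxyId".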
        edge = False
        if "edgeProxyId" in inputs:
            if inputs["edgeProxyId"] is not None:
                edge = True

        if "id" in inputs:
            bot_instance_id = int(inputs["id"])

        botengine = BotEngine(
            inputs,
            server_override=server_override,
            services=services,
            count=count,
            cloud=cloud,
            edge=edge,
            local=local,
            playback=playback,
            context=context,
            bot_instance_id=bot_instance_id,
            local_execution_count=local_execution_count,
        )
    else:
        botengine = botengine_override

    botengine.start_time_sec = time.time()

    if not botengine.edge:
        botengine._download_core_variables()

    botengine.load_variables_time_sec = time.time()

    if not botengine.local and not botengine.playback:
        for server in botengine._servers:
            if "sbox" in server:
                botengine._validate_count()
                break

    all_triggers = []
    for i in inputs["inputs"]:
        all_triggers.append(i["trigger"])

    botengine.all_trigger_types = all_triggers

    timers_existed = False
    botengine.triggers_total = len(all_triggers)

    for execution_json in inputs["inputs"]:
        if botengine.playback and "apiKey" in execution_json:
            botengine.set_api_key(execution_json["apiKey"])
            del execution_json["apiKey"]

        botengine.triggers_index += 1
        try:
            botengine.get_logger(f"{'botengine'}").info(
                "|_run() Current time: "
                + str(execution_json["time"])
                + "; Trigger: "
                + str(execution_json["trigger"])
            )
            botengine.get_logger(f"{'botengine'}").debug(
                "|_run() Run Inputs: " + json.dumps(execution_json, sort_keys=True)
            )
        except Exception as e:
            # Ignore the error. This might happen during bot playback because data_request CSV content may be represented in bytes.
            botengine.get_logger(f"{'botengine'}").warning(
                "|_run() Failed checking execution json... {}".format(e)
            )

        trigger = execution_json["trigger"]

        if trigger == 2048 and len(inputs["inputs"]) > 1:
            botengine.get_logger(f"{'botengine'}").error(
                "|_run() Asynchronous Data Request Trigger contained {} bot inputs, should have only contained a single trigger.".format(
                    len(inputs["inputs"])
                )
            )

        botengine.set_inputs(execution_json)

        # Cannot execute timers during a data request trigger because those triggers execute concurrently with other executions.
        if trigger != 2048 and not botengine.edge:
            saved_timers = copy.copy(botengine.load_variable(TIMERS_VARIABLE_NAME))
            delete_timers = []

            if saved_timers is not None:
                botengine.get_logger(f"{'botengine'}").debug(
                    "|_run() " + Color.PURPLE + "TIMER STACK: " + Color.END
                )
                for t in saved_timers:
                    botengine.get_logger(f"{'botengine'}").debug(
                        "|_run() " + Color.PURPLE + "\t{}".format(t) + Color.END
                    )

                timers_existed |= len(saved_timers) > 1

                # Double check our timers first, before giving up and letting the bot engine execute trigger type 64.
                for focused_timer in saved_timers:
                    t = focused_timer[0]
                    botengine.get_logger(f"{'botengine'}").debug(
                        "|_run() "
                        + Color.YELLOW
                        + "Checking timer: {} at time {}".format(focused_timer, t)
                        + Color.END
                    )
                    if t != MAXINT:
                        if t <= execution_json["time"]:
                            botengine.get_logger(f"{'botengine'}").debug(
                                "|_run() "
                                + Color.PURPLE
                                + "Executing timer. delta={} timer={}".format(
                                    execution_json["time"] - t, focused_timer
                                )
                                + Color.END
                            )
                            delete_timers.append(focused_timer)
                            botengine.all_trigger_types.append(64)
                            if callable(focused_timer[1]):
                                focused_timer[1](botengine, focused_timer[2])
                            else:
                                botengine.get_logger(f"{'botengine'}").error(
                                    "|_run() Timer fired and popped, but cannot call the focused timer: "
                                    + str(focused_timer)
                                )
                        elif t > 1921875905000:  # Greater than year 2030 - a mistake
                            delete_timers.append(focused_timer)

            # The expired timers collected above are removed from the saved timer variable below.
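            # For reference, each entry on the timer stack is a tuple of the form
            # (timestamp_ms, callback, argument, reference); element [0] is the fire
            # time that was compared against the execution time above.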
# Push the timers back onto our stack of things to flush. if len(delete_timers) > 0: save_timers = botengine.load_variable(TIMERS_VARIABLE_NAME) botengine.get_logger(f"{'botengine'}").debug( "|_run() " + Color.CYAN + "Re-Loaded Timers: {}".format(save_timers) + Color.END ) for d in delete_timers: if d in save_timers: botengine.get_logger(f"{'botengine'}").debug( "|_run() " + Color.BLUE + "Delete this timer: {}".format(d) + Color.END ) save_timers.remove(d) botengine.get_logger(f"{'botengine'}").debug( "|_run() " + Color.GREEN + "Timers to Save: {}".format(save_timers) + Color.END ) botengine.save_variable(TIMERS_VARIABLE_NAME, save_timers) if trigger != 64: bot.run(botengine) elif saved_timers is not None and not timers_existed: # Adding this here to help diagnose variable vs. timer problems. # The hope is to confirm that our timers fire correctly and that it's the variable that isn't being stored - at least that would help focus our debugging efforts. # These used to be treated as errors, but here is what actually happens: the bot executes from a device measurement at the same time as a timer fire. # The device measurement, landing just after the timer is recorded to fire, pops the timer off the stack. # But the timer is still queued up to execute from the server. So a microsecond later, the bot executes again # from the server saying the timer needs to fire, but we already handled the timer on the previous execution. # Since there's no way to tell on this execution that it was already handled, there's no way to accurately say this is an error. botengine.get_logger(f"{'botengine'}").error( "|_run() Timer fired but no recollection as to why." ) botengine.get_logger(f"{'botengine'}").error( "|_run() timer variable is: " + str(saved_timers) ) if not botengine.edge: botengine.flush_commands() botengine.flush_questions() botengine.flush_analytics() # Also remember: Questions and Mixpanel always have to be flushed before flushing variables. botengine.flush_states() botengine.flush_binary_variables() if trigger != 2048 and not botengine.edge: saved_timers = botengine.load_variable(TIMERS_VARIABLE_NAME) if saved_timers is not None and len(saved_timers) > 0: while len(saved_timers) > 0: try: current_timer = saved_timers[0] if current_timer[0] != MAXINT: if next_timer_at_server is None or current_timer[0] < next_timer_at_server: # Get the current time current_time_ms = botengine.get_timestamp() time_difference = current_timer[0] - current_time_ms next_timer = inputs.get("timer", 0) if time_difference <= 0: # Execute this timer immediately botengine.get_logger(f"{'botengine'}").info( "|_run() Timer expired, executing immediately: {}".format(current_timer) ) next_timer = 0 elif time_difference < TIMER_MIN_MS: if abs(next_timer - current_timer[0]) > TIMER_MIN_MS: # The timer is too short, extend it if it is # outside of the minimum timer logic. botengine.get_logger(f"{'botengine'}").info( "|_run() Timer too short ({}ms), extend timer by {}ms. Timer details: {}".format(time_difference, TIMER_MIN_MS - time_difference, str(current_timer)) ) backup_timer = current_time_ms + TIMER_MIN_MS # Call _execute_again_at_timestamp() and check the returned timer value to confirm the timer was scheduled correctly next_timer = botengine._execute_again_at_timestamp(backup_timer) elif next_timer > 0: if abs(next_timer - current_timer[0]) > TIMER_MIN_MS: # A timer is already set and needs to be updated # with a different timestamp if outside of the # minimum timer logic.
botengine.get_logger(f"{'botengine'}").info( "|_run() Updating existing timer to a new timestamp: {} -> {}".format( next_timer, current_timer[0] )) # Call _execute_again_at_timestamp() and check # the returned timer value if timer scheduled # correctly next_timer = botengine._execute_again_at_timestamp(current_timer[0]) else: # No timer is set, so we can set the current timer next_timer = botengine._execute_again_at_timestamp(current_timer[0]) botengine.get_logger(f"{'botengine'}").info("|_run() Setting new timer: {}".format(next_timer)) if next_timer == 0: if time_difference > 0: # If the timer is 0 and we expect the timer to be # executed in the future, it means server cannot # schedule timer -execute the timer callback immediately botengine.get_logger(f"{'botengine'}").warning( "|_run() Timer execution failed or returned 0. Executing callback now. current_time_ms={} next_timer_at_server={} current_timer={}. saved_timers={}".format(current_time_ms, next_timer_at_server, current_timer, saved_timers) ) # Remove executed timer from saved_timers del saved_timers[0] botengine.save_variable(TIMERS_VARIABLE_NAME, saved_timers) # Execute timer callback immediately if callable(current_timer[1]): current_timer[1](botengine, current_timer[2]) saved_timers = botengine.load_variable(TIMERS_VARIABLE_NAME) else: botengine.get_logger(f"{'botengine'}").error( "|_run() Timer callback is not callable: {}".format(current_timer) ) continue # Continue to schedule the next available timer else: # Update timer schedule with the next timer described by the cloud saved_timers[0] = (next_timer, current_timer[1], current_timer[2], current_timer[3]) botengine.get_logger(f"{'botengine'}").info( "|_run() < Successfully set alarm: {}".format(saved_timers[0]) ) botengine.save_variable(TIMERS_VARIABLE_NAME, saved_timers) break # Successfully scheduled a timer, exit loop else: botengine.get_logger(f"{'botengine'}").info( "|_run() | Alarm already set: {}".format( current_timer ) ) break # Timer is already set, exit loop else: botengine.get_logger(f"{'botengine'}").debug("|_run() finished with MAXINT timer, no more timers to process.") break except Exception as e: botengine.get_logger(f"{'botengine'}").error( "|_run() Could not _execute_again_at_timestamp to set timer: {}".format( str(e) ) ) # Ensure saved_timers is not empty before accessing index 0 if len(saved_timers) > 0: # Remove the executed/failed timer from saved_timers del saved_timers[0] botengine.save_variable(TIMERS_VARIABLE_NAME, saved_timers) # Execute the callback if it is callable if callable(saved_timers[0][1]): saved_timers[0][1](botengine, saved_timers[0][2]) saved_timers = botengine.load_variable(TIMERS_VARIABLE_NAME) else: botengine.get_logger(f"{'botengine'}").error( "|_run() Timer callback is not callable: {}".format(saved_timers) ) if len(saved_timers) > 0: botengine.flush_binary_variables() # Continue to the next timer continue # Non-time-critical outputs to wrap up botengine.flush_binary_variables() botengine.flush_rules() botengine.flush_tags() botengine.flush_asynchronous_requests() botengine.get_logger(f"{'botengine'}").debug( "<_run() Execution Complete: {}".format( bot.get_intelligence_statistics(botengine) if hasattr(bot, "get_intelligence_statistics") else {} ) ) return botengine # =============================================================================== # BotEngine Class # =============================================================================== class BotEngine: """This BotEngine class runs your bot and connects to the Bot 
Server""" # Trigger Types TRIGGER_UNPAUSED = 0 # 0 TRIGGER_SCHEDULE = 1 << 0 # 1 TRIGGER_MODE = 1 << 1 # 2 TRIGGER_DEVICE_ALERT = 1 << 2 # 4 TRIGGER_DEVICE_MEASUREMENT = 1 << 3 # 8 TRIGGER_QUESTION_ANSWER = 1 << 4 # 16 TRIGGER_DEVICE_FILES = 1 << 5 # 32 TRIGGER_TIMER = 1 << 6 # 64 TRIGGER_METADATA = 1 << 7 # 128 TRIGGER_DATA_STREAM = 1 << 8 # 256 TRIGGER_COMMAND_RESPONSE = 1 << 9 # 512 TRIGGER_LOCATION_CONFIGURATION = 1 << 10 # 1024 TRIGGER_DATA_REQUEST = 1 << 11 # 2048 TRIGGER_MESSAGES = 1 << 12 # 4096 TRIGGER_DOCUMENTS = 1 << 13 # 8192 # Access category types ACCESS_CATEGORY_MODE = 1 ACCESS_CATEGORY_FILE = 2 ACCESS_CATEGORY_PROFESSIONAL_MONITORING = 3 ACCESS_CATEGORY_DEVICE = 4 ACCESS_CATEGORY_CHALLENGE = 5 ACCESS_CATEGORY_RULES = 6 # Question Responses QUESTION_RESPONSE_TYPE_BOOLEAN = 1 QUESTION_RESPONSE_TYPE_MULTICHOICE_SINGLESELECT = 2 QUESTION_RESPONSE_TYPE_MULTICHOICE_MULTISELECT = 4 QUESTION_RESPONSE_TYPE_DAYOFWEEK = 6 QUESTION_RESPONSE_TYPE_SLIDER = 7 QUESTION_RESPONSE_TYPE_TIME = 8 QUESTION_RESPONSE_TYPE_DATETIME = 9 QUESTION_RESPONSE_TYPE_TEXT = 10 # Question display types # BOOLEAN QUESTIONS QUESTION_DISPLAY_BOOLEAN_ONOFF = 0 QUESTION_DISPLAY_BOOLEAN_YESNO = 1 QUESTION_DISPLAY_BOOLEAN_BUTTON = 2 QUESTION_DISPLAY_BOOLEAN_THUMBS = 3 # MULTIPLE CHOICE - MULTIPLE SELECT QUESTIONS QUESTION_DISPLAY_MCMS_CHECKBOX = 0 # MULTIPLE CHOICE - SINGLE SELECT QUESTIONS QUESTION_DISPLAY_MCSS_RADIO_BUTTONS = 0 QUESTION_DISPLAY_MCSS_PICKER = 1 QUESTION_DISPLAY_MCSS_SLIDER = 2 QUESTION_DISPLAY_MCSS_MODAL_BOTTOM_SHEET = 3 # DAY OF WEEK QUESTIONS QUESTION_DISPLAY_DAYOFWEEK_MULTISELECT = 0 QUESTION_DISPLAY_DAYOFWEEK_SINGLESELECT = 1 # SLIDER QUESTION_DISPLAY_SLIDER_INTEGER = 0 QUESTION_DISPLAY_SLIDER_FLOAT = 1 QUESTION_DISPLAY_SLIDER_MINSEC = 2 # TIME QUESTION_DISPLAY_TIME_HOURS_MINUTES_SECONDS_AMPM = 0 QUESTION_DISPLAY_TIME_HOURS_MINUTES_AMPM = 1 # DATETIME QUESTION_DISPLAY_DATETIME_DATE_AND_TIME = 0 QUESTION_DISPLAY_DATETIME_DATE = 1 # Answer Status ANSWER_STATUS_NOT_ASKED = -1 ANSWER_STATUS_DELAYED = 0 ANSWER_STATUS_QUEUED = 1 ANSWER_STATUS_AVAILABLE = 2 ANSWER_STATUS_SKIPPED = 3 ANSWER_STATUS_ANSWERED = 4 ANSWER_STATUS_NO_ANSWER = 5 # Professional Monitoring PROFESSIONAL_MONITORING_NEVER_PURCHASED = 0 PROFESSIONAL_MONITORING_PURCHASED_BUT_NOT_ENOUGH_INFO = 1 PROFESSIONAL_MONITORING_REGISTRATION_PENDING = 2 PROFESSIONAL_MONITORING_REGISTERED = 3 PROFESSIONAL_MONITORING_CANCELLATION_PENDING = 4 PROFESSIONAL_MONITORING_CANCELLED = 5 # Professional monitoring alert status PROFESSIONAL_MONITORING_ALERT_STATUS_QUIET = 0 PROFESSIONAL_MONITORING_ALERT_STATUS_RAISED = 1 PROFESSIONAL_MONITORING_ALERT_STATUS_CANCELLED = 2 PROFESSIONAL_MONITORING_ALERT_STATUS_REPORTED = 3 # Rule status RULE_STATUS_INCOMPLETE = 0 RULE_STATUS_ACTIVE = 1 RULE_STATUS_INACTIVE = 2 # Data stream destinations DATASTREAM_ORGANIZATIONAL_FIELD_TO_INDIVIDUALS = 1 DATASTREAM_ORGANIZATIONAL_FIELD_TO_ORGANIZATIONS = 2 DATASTREAM_ORGANIZATIONAL_FIELD_TO_ALL = 3 # Narrative priority levels NARRATIVE_PRIORITY_ANALYTIC = -1 NARRATIVE_PRIORITY_DEBUG = 0 NARRATIVE_PRIORITY_DETAIL = 0 NARRATIVE_PRIORITY_INFO = 1 NARRATIVE_PRIORITY_WARNING = 2 NARRATIVE_PRIORITY_CRITICAL = 3 # Narrative types # High-frequency 'observation' entries for explainable AI and accountability NARRATIVE_TYPE_OBSERVATION = 0 # Low-frequency 'journal' entries for SUMMARIZED exec-level communications to humans NARRATIVE_TYPE_JOURNAL = 4 # High-frequency 'insight' entries for real-time CRITICAL exec-level communications to humans NARRATIVE_TYPE_INSIGHT = 5 # Alert Categories 
ALERT_CATEGORY_NONE = 0 ALERT_CATEGORY_LIVE_HERE = 1 ALERT_CATEGORY_FAMILY_FRIEND = 2 ALERT_CATEGORY_SOCIAL_REMINDERS_ONLY = 3 # Location Access LOCATION_ACCESS_NONE = 0 LOCATION_ACCESS_READONLY = 10 LOCATION_ACCESS_CONTROL_DEVICES = 20 LOCATION_ACCESS_CONTROL_EVERYTHING = 30 # Support Ticket Types TICKET_TYPE_PROBLEM = 1 TICKET_TYPE_INCIDENT = 2 TICKET_TYPE_QUESTION = 3 TICKET_TYPE_TASK = 4 # Support Ticket Priorities TICKET_PRIORITY_LOW = 1 TICKET_PRIORITY_NORMAL = 2 TICKET_PRIORITY_HIGH = 3 TICKET_PRIORITY_URGENT = 4 # Tagging TAG_TYPE_USERS = 1 TAG_TYPE_LOCATIONS = 2 TAG_TYPE_DEVICES = 3 TAG_TYPE_FILES = 4 # Data request types DATA_REQUEST_TYPE_PARAMETERS = 1 DATA_REQUEST_TYPE_ACTIVITIES = 2 DATA_REQUEST_TYPE_LOCATIONS = 3 DATA_REQUEST_TYPE_MODES = 4 DATA_REQUEST_TYPE_NARRATIVES = 5 DATA_REQUEST_TYPE_DEVICES = 6 BOT_TYPE_LOCATION = 0 BOT_TYPE_ORGANIZATION = 1 BOT_TYPE_ORGANIZATION_RAG = 2 def __init__( self, raw_inputs, server_override=None, services=None, count=None, cloud=None, edge=False, local=False, playback=False, context=None, bot_instance_id=None, local_execution_count=None, ): """ Constructor :param raw_inputs: The entire input JSON string from the Bot Server :param server_override: Option to override the server URL when executing on someone's computer instead of in the cloud :param services: List of subscription services in the user's account :param count: Each time the bot triggers, the count should increment by 1 when running on the server. The count is always 0 when running locally on a computer. :param cloud: Description of the cloud server :param edge: True if the edge owns execution of the bot instead of the cloud :param local: True if this bot is executing locally on a developer computer :param playback: True if this bot is executing against previously recorded data :param context: Optional AWS Lambda context object :param bot_instance_id: Instance ID of this bot :param local_execution_count: Number of times this bot has executed locally """ bot_type = self.get_bot_type() # User's API key if bot_type != BotEngine.BOT_TYPE_ORGANIZATION_RAG: if "apiKey" not in raw_inputs: raise BotError("No API key provided in the inputs") self.__key = raw_inputs["apiKey"] # Remove the evidence of the API key so it can't be seen by probing externally del raw_inputs["apiKey"] # Server to connect with if server_override is not None: self._servers = [server_override] elif "apiHosts" in raw_inputs: self._servers = raw_inputs["apiHosts"] elif "apiHost" in raw_inputs: self._servers = [raw_inputs["apiHost"]] else: # No API Key self.__key = None # No servers self._servers = [] # Raw inputs for debugging reference self._raw_inputs = raw_inputs # The current server index used for requests self._server_index = 0 # Lock the server index when we successfully post the start key self._server_locked = False # Dynamically imported requests module self._requests = importlib.import_module("requests") # Description of the cloud we are running on self._cloud = cloud # List of the user's services dictionaries self.services = services # List of all the trigger types we expect to execute in and around this single execution instance self.all_trigger_types = [] # Trigger count, which is incremented on every trigger when running on the server self.count = count # Dictionary of variables by name self.variables = {} # Dictionary of the variables that need to be stored to the cloud, by name self.variables_to_flush = {} # State content in our cache. { timestamp_ms : { state_json_dictionary } } # This does not include extra STATE_KEY_* fields used in the self.states_to_flush cache, it's just the raw state content # Non-time-series states simply have a timestamp_ms of None. self.states = {None: {}} # State content to flush.
# Each state to flush will include extra fields beyond the content to declare how to save the state. # Non-time-series states will simply have a timestamp_ms of None. # { # timestamp_ms : { # "state_address": { # STATE_KEY_CONTENT: json_content, # STATE_KEY_PUBLISH: True or False to publish to a partner, # STATE_KEY_OVERWRITE: True to overwrite the whole JSON content on the server, or False if we're just providing a few top-level keys to update. # STATE_KEY_UPDATE_LIST: [optional, list, of, top-level, keys, added, or, updated], # STATE_KEY_DELETE_LIST: [optional, list, of, top-level, keys, deleted] # } # } # } self.states_to_flush = {} # Question that was answered as we triggered the bot from an answered question self.question_answered = None # self.commands_to_flush format: [ << array of devices, with an array of params in each device >> ] self.commands_to_flush = [] # Asynchronous data requests self.data_requests = [] # Tags to create on the server self.tags_to_create = [] # Tags to delete on the server self.tags_to_delete = [] # Dictionary of tags to create by user ID, for admins self.tags_to_create_by_user = {} # Dictionary of tags to delete by user ID, for admins self.tags_to_delete_by_user = {} # Dictionary of rules to toggle self.rules = {} # Dictionary of new questions to ask self.questions_to_ask = {} # Dictionary of questions to delete self.questions_to_delete = {} # Organization properties to override local bot default settings self.organization_properties = {} # We only need to issue an API call to cancel timers once per execution self.cancelled_timers = False # This is the total number of triggers we'll be handling in this execution. Primarily used for debugging. self.triggers_total = 0 # When we have multiple triggers in one execution, this is which trigger number we're handling now. self.triggers_index = 0 # Cache for location users information so we never call it multiple times self._location_users_cache = None # User ID executing that caused this trigger self.user_id = None # Is this bot executing on the edge self.edge = edge # Is this bot executing locally on a developer laptop self.local = local # How many times has this bot been run locally # Enabling us to know if it's the first run and a new_version() should be triggered, or gather stats if we want. self.local_execution_count = local_execution_count # True if this bot is being executed with previously recorded data self.playback = playback # What is this bot's instance ID self.bot_instance_id = bot_instance_id # Forcefully set to True to declare this bot is currently executing in the cloud. # This is set to False inside botclient.py where we execute on the edge. 
self.executing_in_cloud = True # HTTP Session self.session = self._requests.Session() all_triggers = [] if "inputs" in raw_inputs: for i in raw_inputs["inputs"]: all_triggers.append(i["trigger"]) debug_instance_id = 0 if bot_instance_id is not None: debug_instance_id = bot_instance_id self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "|__init__() Instance ID: {}; Triggers: {}".format( debug_instance_id, all_triggers ) ) if self.local: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "|__init__() Running locally" ) return if self.playback: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "|__init__() Running in playback mode" ) return if bot_type == BotEngine.BOT_TYPE_ORGANIZATION_RAG: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "|__init__() Running as an organization RAG bot" ) return if "startKey" not in raw_inputs: raise Exception("No start key in inputs") if raw_inputs["startKey"] == 0: raise Exception("Start key is 0") if not self._start(raw_inputs["startKey"], context=context): raise Exception( "Failed to start bot with start key: " + str(raw_inputs["startKey"]) ) self._server_locked = True def _start_core_variables_thread(self): if not hasattr(self, "thread_event"): import threading self.thread_event = threading.Event() self.request_thread = threading.Thread( target=self._download_core_variables_async, args=(self.thread_event,) ) if not self.thread_event.is_set(): self.request_thread.start() def is_core_variables_downloaded(self): return not self.thread_event.is_set() # =========================================================================== # HTTP Methods # =========================================================================== def _http_get(self, path, headers={}, params=None, timeout=5, stream=False): """ HTTP GET :param path: Path to retrieve :param headers: Dictionary of headers, which will override any default headers :param params: Dictionary of parameters :param timeout: Timeout in seconds, default is 5 :param stream: True to stream. Default is False. 
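Note: on HTTP and connection errors, this method fails over to the next server in self._servers (unless the server index was locked by a successful /analytic/start), and on timeouts it retries the same request with a progressively longer timeout before raising.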
:return: Response object from Requests module """ if self.playback: return if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return {} global _https_proxy h = self._build_common_headers() h.update(headers) while True: try: r = self.session.get( self._servers[self._server_index] + path, params=params, headers=h, timeout=timeout, proxies=_https_proxy, stream=stream, ) return r except self._requests.HTTPError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP error calling GET " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.ConnectionError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Connection HTTP error calling GET " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.Timeout as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( str(timeout) + " second HTTP Timeout calling GET " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) timeout += 10 if timeout >= 30: raise self._requests.Timeout() if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.TooManyRedirects as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Too many redirects HTTP error calling GET " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP exception calling GET " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) def _http_post(self, path, headers={}, params=None, data=None, timeout=5): """ HTTP POST :param path: Path to retrieve :param headers: Dictionary of headers, which will override any default headers :param params: Dictionary of parameters :param data: Data to POST :param timeout: Timeout in seconds, default is 5 :return: Response object from Requests module """ if self.playback: return if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return {} global _https_proxy h = self._build_common_headers() h.update(headers) while True: try: r = self.session.post( self._servers[self._server_index] + path, params=params, headers=h, data=data, timeout=timeout, proxies=_https_proxy, ) return r except self._requests.HTTPError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP error calling POST " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.ConnectionError as e:
self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Connection HTTP error calling POST " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.Timeout as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( str(timeout) + " second HTTP Timeout calling POST " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) timeout += 5 if timeout >= 25: raise self._requests.Timeout() if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.TooManyRedirects as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Too many redirects HTTP error calling POST " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP exception calling POST " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) def _http_put(self, path, headers={}, params=None, data=None, timeout=5): """ HTTP PUT :param path: Path to retrieve :param headers: Dictionary of headers, which will override any default headers :param params: Dictionary of parameters :param data: Data to PUT :param timeout: Timeout in seconds, default is 5 :return: Response object from Requests module """ if self.playback: return if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return {} global _https_proxy h = self._build_common_headers() h.update(headers) while True: try: r = self.session.put( self._servers[self._server_index] + path, params=params, headers=h, data=data, timeout=timeout, proxies=_https_proxy, ) return r except self._requests.HTTPError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP error calling PUT " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.ConnectionError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Connection HTTP error calling PUT " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.Timeout as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( str(timeout) + " second HTTP Timeout calling PUT " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) timeout += 5 if timeout >= 25: raise self._requests.Timeout() if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.TooManyRedirects as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( 
"Too many redirects HTTP exception calling PUT " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP exception calling PUT " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) def _http_delete(self, path, headers={}, params=None, timeout=5): """ HTTP PUT :param path: Path to retrieve :param headers: Dictionary of headers, which will override any default headers :param params: Dictionary of parameters :param data: Data to PUT :param timeout: Timeout in seconds, default is 5 :return: Response object from Requests module """ if self.playback: return if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return {} global _https_proxy h = self._build_common_headers() h.update(headers) while True: try: r = self.session.delete( self._servers[self._server_index] + path, params=params, headers=h, timeout=timeout, proxies=_https_proxy, ) return r except self._requests.HTTPError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP error calling DELETE " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.ConnectionError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Connection HTTP error calling DELETE " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.Timeout as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( str(timeout) + " second HTTP Timeout calling DELETE " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) timeout += 5 if timeout >= 25: raise self._requests.Timeout() if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except self._requests.TooManyRedirects as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Too many redirects HTTP error calling DELETE " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP exception calling DELETE " + str(self._servers[self._server_index] + path) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) if self._server_locked: continue self._server_index += 1 self._server_index %= len(self._servers) # =========================================================================== # System Helper Methods # =========================================================================== def set_inputs(self, inputs): """ Set the inputs for this execution 
:param inputs: Inputs for this next execution """ self.inputs = inputs if "userId" in inputs: self.user_id = inputs["userId"] if self.inputs["trigger"] == BotEngine.TRIGGER_QUESTION_ANSWER: question_block = self.inputs["question"] self.resynchronize_questions() saved_questions = self.load_variable(QUESTIONS_VARIABLE_NAME) if saved_questions is not None: if question_block["key"] in saved_questions: self.question_answered = saved_questions[question_block["key"]] if "userId" in inputs: self.question_answered.user_id = inputs["userId"] else: self.question_answered.user_id = None # Extract custom organization bot properties if "access" in inputs: for access in inputs["access"]: try: properties = access["location"]["organization"]["properties"] # Set each organization property but remove the static bot identifier "bot." from the beginning for key in properties: # Try to normalize the value try: value = eval(properties[key], {}, {}) except Exception: if properties[key] in ["true", "True"]: value = True elif properties[key] in ["false", "False"]: value = False else: value = properties[key] self.organization_properties[key.replace("bot.", "")] = value break except Exception: pass def _enable_debug(self): """ Enable debug logging """ logging.basicConfig() logging.getLogger().setLevel(logging.DEBUG) requests_log = logging.getLogger("requests.packages.urllib3") requests_log.setLevel(logging.DEBUG) requests_log.propagate = True def _build_common_headers(self, content_type="application/json"): """ Build common HTTP headers for API calls :param content_type: Content type of the request, default is 'application/json' :return: HTTP Header dictionary to inject into HTTP requests """ return { "ANALYTIC_API_KEY": self.__key, "Content-Type": content_type, "User-Agent": "BotEngine/" + str(__version__), } def _start(self, start_key, context=None): """ Called once at the start of execution to prevent 2 bots of the same instance from executing in parallel. The first server to return a success is used for the rest of the execution. :param start_key: :return: True if the start is successful; False if it's not successful """ params = {"startKey": start_key} if context is not None and self.is_server_version_newer_than(1, 10): params["awsRequestId"] = context.aws_request_id params["logStreamName"] = context.log_stream_name r = self._http_post( "/analytic/start", params=params, timeout=DEFAULT_START_KEY_TIMEOUT_S, ) logger = self.get_logger(f"{'botengine'}.{__class__.__name__}") logger.debug( "|_start() Notify cloud that the bot is starting: /analytic/start params={}".format( params ) ) try: j = json.loads(r.text) logger.debug("|_start() Cloud response to /analytic/start: {}".format(j)) _check_for_errors(j) if "resultCode" in j: if j["resultCode"] != 0: # This execution is not allowed logger.start_code = j["resultCode"] raise BotError( "Wrong Start API result code. Response={}".format( json.dumps(j) ), j["resultCode"], ) logger.debug("|_start() Success, the bot has started {}".format(j)) return True except json.decoder.JSONDecodeError as e: # Probably because the server is restarting logger.warning("|_start() Failure, the bot has not started. {}".format(e)) return False except BotError as e: logger.error("|_start() Failure, the bot has not started.. 
{}".format(e)) return False # =========================================================================== # Developer helper methods # =========================================================================== def get_inputs(self): """ This method will return the inputs, e.g. apiKey, apiHost, trigger type, alerts, measurement blocks :return: all inputs in a JSON dictionary format """ return self.inputs def get_trigger_type(self): """ This method will return the type of trigger that your bot uses * Trigger 1 = Schedule (based off a cron schedule inside the runtime.json file) * Trigger 2 = Location Event (switching between home / away / etc.) * Trigger 4 = Device Alert * Trigger 8 = Device Measurements * Trigger 16 = Question Answered * Trigger 32 = New device file (like a video or picture) * Trigger 64 = Execute Again Countdown Timer * Trigger 256 = Data Stream Message :return: Trigger type that triggered this bot """ return int(self.inputs["trigger"]) def get_triggers(self): """ This method will find and return the information about what triggered this bot. Location Events (Modes) Example: { 'category':1, 'control':True, 'trigger':True, 'location':{ 'locationId':62, 'prevEvent':'HOME', 'event':'AWAY' }, 'read':True } Device Measurements Example: { 'trigger':True, 'device':{ 'deviceType':10014, 'updateDate':1465517032000, 'deviceId':'FFFFFFFF00600a70', 'description':'Practice\xa0Entry\xa0Sensor', 'measureDate':1465517031000 }, 'read':True, 'control':True, 'category':4 } :return: JSON structure describing what triggered this bot """ if "access" not in self.inputs: return [] self.trigger_blocks = [] for block in self.inputs["access"]: if "trigger" in block: if block["trigger"]: self.trigger_blocks.append(block) return self.trigger_blocks def get_measures_block(self): """ :return: the measurements block from our inputs, if any """ if "measures" in self.inputs: return self.inputs["measures"] return None def get_alerts_block(self): """ :return: the alerts block from our inputs, if any """ if "alerts" in self.inputs: return self.inputs["alerts"] return None def get_access_block(self): """ :return: the access block from our inputs, if any """ if "access" in self.inputs: return self.inputs["access"] return None def get_device_access_block(self, device_id): """ Return the access block for a specific device :param device_id: Device ID to search for :return: Access block for a specific device, None if it doesn't exist """ if "access" in self.inputs: for a in self.inputs["access"]: if a["category"] == BotEngine.ACCESS_CATEGORY_DEVICE: if "device" in a: if a["device"]["deviceId"] == device_id: return a return None def is_executing_timer(self): """ :return: True if this execution includes a timer fire """ return 64 in self.all_trigger_types def get_bundle_id(self): """ When you generate a bot, botengine will automatically generate and add a 'bundle.py' file which contains the bundle ID. This method simply returns the bundle ID from the contents of that file. :return: The bundle ID for this bot """ import bundle # type: ignore return bundle.BUNDLE_ID def get_cloud_address(self): """ When you generate a bot, botengine will automatically generate and add a 'bundle.py' file which contains the cloud address we're uploading the bot to. This method simply returns the CLOUD_ADDRESS from the contents of the bundle.py file. 
:return: The cloud address for this bot """ import bundle # type: ignore return bundle.CLOUD_ADDRESS def get_bot_type(self): """ When you generate a bot, botengine will automatically generate and add a 'bundle.py' file which contains the bot type. This method simply returns the BOT_TYPE from the contents of the bundle.py file. :return: The bot type for this bot """ import bundle # type: ignore return int(bundle.BOT_TYPE if hasattr(bundle, "BOT_TYPE") else 0) def get_bot_instance_id(self): """ :return: The bot instance ID """ return self.bot_instance_id def get_location_id(self): """ :return: The location ID for this bot """ return self.inputs["locationId"] def get_organization_id(self): """ :return: The organization ID for this bot """ info = self.get_location_info() if info is not None and "location" in info: if "organizationId" in info["location"]: return int(info["location"]["organizationId"]) return None def get_organization_name(self): """ :return: The name of the organization this location belongs to. """ info = self.get_location_info() if info is not None and "location" in info: if "organization" in info["location"]: if "organizationName" in info["location"]["organization"]: return info["location"]["organization"]["organizationName"] return "Organization ID {}".format(self.get_organization_id()) def get_organization_signup_code(self): """ :return: The sign-up code (short domain name) of the organization this location belongs to. Or None if we don't have it for some reason. """ info = self.get_location_info() if info is not None and "location" in info: if "organization" in info["location"]: if "domainName" in info["location"]["organization"]: return info["location"]["organization"]["domainName"] return None def get_country_code(self): """ :return: The country code of the location. """ info = self.get_location_info() if info is not None and "location" in info: if ( "country" in info["location"] and "countryCode" in info["location"]["country"] ): return info["location"]["country"]["countryCode"] return None def get_location_info(self): """ Returns this information: { "category": 1, "control": true, "location": { "event": "STAY", "latitude": "47.72328", "locationId": 755735, "longitude": "-122.17426", "name": "Apartment 103", "timezone": { "dst": true, "id": "US/Pacific", "name": "Pacific Standard Time", "offset": -480 }, "zip": "98034" }, "read": true, "trigger": false } :return: location information from the access block, or None if it isn't available """ for access in self.inputs.get("access", []): if access.get("category", "") == self.ACCESS_CATEGORY_MODE: return access return None def is_playback(self): return self.playback def is_test_location(self): """ Determine if this is a test (internal employee) location, possibly to avoid logging analytics.
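A hedged usage sketch, where track_event() stands in for a hypothetical analytics helper: if not botengine.is_test_location(): track_event(botengine, "sleep_analyzed")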
:return: True if this is a test location """ info = self.get_location_info() if info is not None and "location" in info: if "test" in info["location"]: return info["location"]["test"] return False def get_location_name(self): """ :return: Name of this location """ location_info = self.get_location_info() if location_info is None: return "Home" if "location" not in location_info: return "Home" if "name" not in location_info["location"]: return "Home" return location_info["location"]["name"] def get_location_latitude(self): """ :return: Location latitude, or None if it doesn't exist """ location_info = self.get_location_info() if location_info is None: return None if "location" not in location_info: return None if "latitude" not in location_info["location"]: return None return float(location_info["location"]["latitude"]) def get_location_longitude(self): """ :return: Location longitude, or None if it doesn't exist """ location_info = self.get_location_info() if location_info is None: return None if "location" not in location_info: return None if "longitude" not in location_info["location"]: return None return float(location_info["location"]["longitude"]) def get_answered_question(self): """ :return: the question that has been answered, if any """ return self.question_answered def get_datastream_block(self): """ :return: the data stream inputs, if any """ if "dataStream" in self.inputs: return self.inputs["dataStream"] return None def get_input_key(self): """ :return: the key provided by the input, if any """ if "key" in self.inputs: return self.inputs["key"] return None def get_file_block(self): """ :return: the 'file' block for an uploaded file, if any """ if "file" in self.inputs: return self.inputs["file"] return None def get_timestamp(self): """ :return: the Unix timestamp of this execution, in milliseconds """ return self.inputs["time"] def get_data_stream_message(self): """ :return: the data stream inputs, if any. Convenience wrapper around get_datastream_block(). """ return self.get_datastream_block() def get_documents_request_id(self): """ :return: the documents request ID """ return self.inputs.get("requestId", None) def get_document_block(self): """ :return: the 'document' block for location configuration triggers """ return self.inputs.get("document", None) def get_users_block(self): """ :return: The 'users' block for location configuration triggers """ if "users" in self.inputs: return self.inputs["users"] return None def get_callcenter_block(self): """ :return: The 'callCenter' block for location configuration triggers """ if "callCenter" in self.inputs: return self.inputs["callCenter"] return None def get_data_block(self): """ :return: The 'data' block for asynchronous data request inputs """ if "data" in self.inputs: return self.inputs["data"] return None def get_messages_block(self): """ :return: the messages block from our inputs, if any """ if "messages" in self.inputs: return self.inputs["messages"] return None def get_property( self, obj_arr, property_name, property_value, return_property_name ): """ This method will locate the first object in the object array that matches the search criteria, and then return the specified property's value :param obj_arr: object array :param property_name: the key in the searching criteria :param property_value: the value in the searching criteria :param return_property_name: the key in the object indicating which corresponding value will be returned :return: the value of return_property_name from the first matching object, or None if no object matches """ response = None for item in obj_arr: if item[property_name] == property_value: response = item[return_property_name] break return response def get_language(self, user_id=None): """ Get the language for the bot associated with this organization, bot
domain, location, or user. :param user_id: User ID to get the language for. :return: language code """ import properties language_code = properties.get_property(self, "DEFAULT_LANGUAGE", complain_if_missing=False) location_info = self.get_location_info() if location_info is not None: if "language" in location_info["location"]: language_code = location_info["location"]["language"] if user_id is not None: user = self.get_location_user(user_id) if user is not None: if "language" in user: language_code = user["language"] # Default to English return language_code if language_code is not None else "en" def is_server_version_newer_than(self, major, minor): """ Compare the server version. :param major: Major value :param minor: Minor value :return: True if the server version is greater than or equal to the input version """ semantic_version = str(self._raw_inputs["version"]).split(".") _major = semantic_version[0] _minor = 0 if len(semantic_version) > 1: _minor = semantic_version[1] is_newer = int(_major) > int(major) or ( int(_major) == int(major) and int(_minor) >= int(minor) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine.is_server_version_newer_than: {}.{} >> {}.{} == {}".format( _major, _minor, major, minor, is_newer ) ) return is_newer # =========================================================================== # Users # =========================================================================== def get_location_users(self): """ Get the list of users at this location ID https://iotapps.docs.apiary.io/#reference/locations/location-users/get-location-users :param location_id: Location ID """ if self.playback: self._location_users_cache = [ { "id": 123, "userName": "john.smith@gmail.com", "altUsername": "1234567890", "firstName": "John", "lastName": "Smith", "nickname": "Johnny", "email": { "email": "john.smith@gmail.com", "verified": True, "status": 0, }, "phone": "1234567890", "phoneType": 1, "smsStatus": 1, "locationAccess": 10, "temporary": True, "accessEndDate": "2019-01-29T02:45:30Z", "accessEndDateMs": 1548747995000, "category": 1, "role": 1, "smsPhone": "1234567899", "language": "en", "avatarFileId": 123, "schedules": [ {"daysOfWeek": 127, "startTime": 10800, "endTime": 20800} ], }, { "id": 124, "userName": "jane.smith@gmail.com", "altUsername": "1234567891", "firstName": "Jane", "lastName": "Smith", "nickname": "Janey", "email": { "email": "jane.smith@gmail.com", "verified": True, "status": 0, }, "phone": "1234567891", "phoneType": 1, "smsStatus": 1, "locationAccess": 40, "temporary": True, "accessEndDate": "2019-01-29T02:45:30Z", "accessEndDateMs": 1548747995000, "category": 1, "role": 4, "smsPhone": "1234567899", "language": "en", "avatarFileId": 123, "schedules": [ {"daysOfWeek": 127, "startTime": 10800, "endTime": 20800} ], }, ] if self._location_users_cache is not None: return self._location_users_cache r = self._http_get( "/cloud/json/location/{}/users".format(self.get_location_id()) ) j = json.loads(r.text) _check_for_errors(j) if "users" in j: self._location_users_cache = j["users"] return j["users"] else: self._location_users_cache = [] return [] def get_location_user(self, user_id): """ Retrieve all information about one specific user at this location ID :param user_id: :return: """ users = self.get_location_users() for user in users: if user["id"] == user_id: return user return None def set_location_user_alert_category(self, user_id, category): """ Set the alert category of a specific user associated with this location. 
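For example, a hedged sketch with a hypothetical user ID: botengine.set_location_user_alert_category(123, botengine.ALERT_CATEGORY_SOCIAL_REMINDERS_ONLY)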
One way to use this is to opt-out users from receiving SMS messages when they reply "STOP" :param user_id: User ID :param category: Category to move to. 0=No Alerts; 1=I live here; 2=Family/Friend; 3=social reminders only """ body = {"user": {"id": user_id, "category": category}} self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "botengine.set_location_user_alert_category: \n{}".format( json.dumps(body, sort_keys=True) ) ) r = self._http_put( "/cloud/json/location/{}/users".format(self.get_location_id()), data=json.dumps(body), ) j = json.loads(r.text) _check_for_errors(j) def set_location_user_access_category(self, user_id, location_access_level): """ Set the location access level for a specific user associated with this location. For example, you can downgrade a user to read-only access, or revoke their access to the location entirely. :param user_id: User ID :param location_access_level: 0 = No Access; 10 = Read all location and device information; 20 = Control location modes and control devices; 30 = Administrate location and manage devices """ body = {"user": {"id": user_id, "locationAccess": location_access_level}} self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "botengine.set_location_user_access_category: \n{}".format( json.dumps(body, sort_keys=True) ) ) r = self._http_put( "/cloud/json/location/{}/users".format(self.get_location_id()), data=json.dumps(body), ) j = json.loads(r.text) _check_for_errors(j) def get_resident_last_names(self): """ Return a string that represents the residents' last names. If there are no last names, it returns "" If there is one last name, that last name is returned. If there are two or more last names like Moss and Neufeld then "Moss/Neufeld" is returned. :return: String representing location residents' last names. """ last_name = "" first = True users = self.get_location_users() last_names_list = [] for user in users: if user["category"] == 1: # This is a resident of the location if "lastName" in user: if first: first = False last_name = user["lastName"] last_names_list.append(user["lastName"].lower().strip()) else: if ( user["lastName"].lower().strip() not in last_names_list and user["lastName"].strip() != "" ): last_name += "/{}".format(user["lastName"]) last_names_list.append(user["lastName"].lower().strip()) return last_name def get_location_user_names( self, to_residents=True, to_supporters=True, sms_only=True ): """ Get a list of users' names associated with the location. [ { 'firstName': 'David', 'lastName': 'Moss' }, ...
] :param to_residents: Extract residents :param to_supporters: Extract supporters :param sms_only: True if we only want to extract users who we can SMS :return: List of dictionaries containing first and last names """ names = [] users = self.get_location_users() if len(users) > 0: for user in users: if (to_residents and user["category"] == 1) or ( to_supporters and user["category"] == 2 ): if sms_only: if "smsStatus" in user: if user["smsStatus"] == 3: continue if "phoneType" in user: if user["phoneType"] != 1: continue if "phoneChannels" in user: if "sms" in user["phoneChannels"]: if not user["phoneChannels"]["sms"]: continue name = {"firstName": None, "lastName": None} if "firstName" in user: name["firstName"] = user["firstName"] if "lastName" in user: name["lastName"] = user["lastName"] names.append(name) return names return [] def get_name_by_user_id(self, user_id): """ Returns a dictionary with 'firstName' and 'lastName' if the user exists, or None if the user doesn't exist :param user_id: User ID to extract the name :return: { 'firstName': "David", 'lastName': "Moss" } """ users = self.get_location_users() for user in users: if int(user["id"]) == int(user_id or -1): name = {"firstName": "", "lastName": ""} if "firstName" in user: name["firstName"] = user["firstName"] if "lastName" in user: name["lastName"] = user["lastName"] return name return None def get_formatted_name_by_user_id(self, user_id): """ Returns the name like "David Moss" :param user_id: :return: """ name = self.get_name_by_user_id(user_id) if name is not None: return "{} {}".format(name["firstName"], name["lastName"]).strip() return None def get_organization_locations(self, organization_id): """ Get a list of locations within an organization, as a bot with admin privileges :param organization_id: :return: """ params = {"organizationId": organization_id} r = self._http_get("/admin/json/locations", params=params) j = json.loads(r.text) _check_for_errors(j) return j["locations"] # =========================================================================== # Variables # =========================================================================== def save_variable( self, name, value, required_for_each_execution=False, shared=False ): """ This method will cache a single variable to be saved to the cloud upon flush_variables() BotEngine will always flush variables to the cloud at the end of executing the bot. We use 'dill' to serialize data. Dill is a form of 'pickle', but can also store things like nested objects According to https://docs.python.org/3/library/pickle.html, the following can be pickled: * None, True, and False * integers, floating point numbers, complex numbers * strings, bytes, bytearrays * tuples, lists, sets, and dictionaries containing only picklable objects * functions defined at the top level of a module (using def, not lambda) * built-in functions defined at the top level of a module * classes that are defined at the top level of a module * instances of such classes whose __dict__ or the result of calling __getstate__() is picklable (see section Pickling Class Instances for details). There are some machine learning models that have a difficult time inside dill and result in PyCapsule errors. We've had better luck pickling those objects before saving them, and then unpickling them on the way out. Do this at the application layer.
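A minimal usage sketch ("my_counter" is a hypothetical variable name):
    count = botengine.load_variable("my_counter") or 0
    botengine.save_variable("my_counter", count + 1)
The update is flushed to the cloud automatically at the end of this execution.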
:param name: Custom name of the variable to persist to the cloud :param value: Value of the variable to persist to the cloud :param required_for_each_execution: Set to True if this variable is required for every execution to increase performance. Setting to True without using this variable on every execution may decrease performance. :param shared: True if this variable is shared with other bots within the location. Convenience method that forwards to save_shared_variable(). """ if name == CORE_VARIABLE_NAME and self.edge: # This bot is currently executing on the edge, do not attempt to save variables. return if shared: return self.save_shared_variable(name, value) # Correct a developer mistake where this variable was previously put in the Core Variables and then saved outside of the core variables if not self.edge: if CORE_VARIABLE_NAME in self.variables: if name in self.variables[CORE_VARIABLE_NAME]: required_for_each_execution = True self.save_variables({name: value}, required_for_each_execution) def save_variables( self, variables_dictionary, required_for_each_execution=False, shared=False ): """ This method will cache multiple variables from a dictionary to be saved to the cloud upon flush_variables() BotEngine will always flush variables to the cloud at the end of executing the bot. :param variables_dictionary: Dictionary of {name:value} variables to persist to the cloud :param required_for_each_execution: Set to True if this variable is required for every execution to increase performance. Setting to True without using this variable on every execution may decrease performance. :param shared: True if these variables are shared with other bots within the location; each variable is forwarded to save_shared_variable(). """ if shared: # Forward each variable to save_shared_variable(), mirroring save_variable() for name in variables_dictionary: self.save_shared_variable(name, variables_dictionary[name]) return if required_for_each_execution: self.variables.setdefault(CORE_VARIABLE_NAME, {}).update(variables_dictionary) self.variables_to_flush[CORE_VARIABLE_NAME] = self.variables[ CORE_VARIABLE_NAME ] else: self.variables_to_flush.update(variables_dictionary) self.variables.update(variables_dictionary) def load_variable(self, name, shared=False): """ Extract a single variable :param name: Name of the variable to load :param shared: True if this variable is shared with other bots within the location. Convenience method that forwards to load_shared_variable().
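Example (hedged; "ml_model" is a hypothetical variable name):
    model = botengine.load_variable("ml_model")
The variable is downloaded from the cloud on first access and served from the local cache afterward.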
:return: the value of the given variable name """ self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_variable() name={}".format(name) ) if shared: return self.load_shared_variable(name) if CORE_VARIABLE_NAME in self.variables: if name in self.variables[CORE_VARIABLE_NAME]: self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_variable() core value={}".format( self.variables[CORE_VARIABLE_NAME][name] ) ) return self.variables[CORE_VARIABLE_NAME][name] if name in self.variables: self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_variable() value={}".format(self.variables.get(name)) ) return self.variables.get(name) self._download_binary_variable(name) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_variable() value={}".format(self.variables.get(name)) ) return self.variables.get(name) def load_variables(self, names): """ Download and return a list of variables :param names: List of variable names to download and return :return: Dictionary of variable names and values """ # TODO bring back the chunking mechanism to safely load large (>8MB) variables for name in names: if self.variables.get(name) is None: self._download_binary_variable(name) return_values = {} for name in names: return_values[name] = self.variables.get(name) return return_values def delete_variable(self, name, shared=False): """ Delete a variable from the cloud :param name: Name of the variable to delete at the cloud :param shared: True if this variable is shared with other bots within the location. Convenience method that forwards to delete_shared_variable(). """ if shared: return self.delete_shared_variable(name) self._http_delete("/analytic/variables/" + urllib.parse.quote(str(name))) try: del self.variables[name] except KeyError: self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:delete_variable() variable not found: {}".format(name) ) try: del self.variables_to_flush[name] except KeyError: self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:delete_variable() variable not found: {}".format(name) ) def flush_binary_variables(self): """ This method will pickle and save multiple variables from the local variables cache to the cloud. It is always automatically called by the BotEngine at the end of bot execution. You do not need to call this manually. According to https://docs.python.org/3/library/pickle.html, the following can be pickled: * None, True, and False * integers, floating point numbers, complex numbers * strings, bytes, bytearrays * tuples, lists, sets, and dictionaries containing only picklable objects * functions defined at the top level of a module (using def, not lambda) * built-in functions defined at the top level of a module * classes that are defined at the top level of a module * instances of such classes whose __dict__ or the result of calling __getstate__() is picklable (see section Pickling Class Instances for details).
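A sketch of what actually gets flushed, based on the code below: each variable is dill-pickled, the pickles are concatenated into a single application/octet-stream POST to /analytic/variables, and the query string carries name/length pairs so the server can split the payload back apart, e.g. ?name=foo&length=123&name=bar&length=456& (hypothetical names and lengths).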
""" if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # We don't want to download variables for organization RAG bots self.variables_to_flush.clear() return import dill if len(self.variables_to_flush) == 0: return # New method pickles = bytearray() params = "" # total_length is purely for information/debugging when running locally and has no impact on execution total_length = 0 for name in self.variables_to_flush: # Used for debugging variables stored to the server if self.inputs["trigger"] == 2048 and name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").warning( "botengine: Attempted to store core variable during a data request trigger" ) continue if SAVE_VARIABLES_TO_DEBUG_FILE: with open("{}.variable".format(name), "wb") as f: v = dill.dumps(self.variables_to_flush[name]) f.write(v) self.get_logger(f"{'botengine'}.{__class__.__name__}").info( Color.BOLD + "{}: Saved {} bytes".format( "{}.variable".format(name), len(v) ) + Color.END ) try: v = dill.dumps(self.variables_to_flush[name]) except TypeError as e: # https://github.com/uqfoundation/dill/issues/58 # https://stackoverflow.com/questions/30499341/establishing-why-an-object-cant-be-pickled/30529992#30529992 # https://stackoverflow.com/questions/1218933/can-i-redirect-the-stdout-in-python-into-some-sort-of-string-buffer # Let's redirect stdout and get the trace from dill import traceback from io import StringIO sys.stdout = my_stdout = StringIO() dill.detect.trace(True) dill.detect.errors(self.variables_to_flush[name]) sys.stdout = sys.__stdout__ self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "botengine: Cannot flush variable {}. \n\ninputs={};\n\ndill.detect.trace stdout={};\n\ndill.detect.baditems()={};\n\ndill.detect.badobjects()={};\n\ndill.detect.badtypes()={};\n\nexception={};\n\ntraceback={}".format( name, self.inputs, my_stdout.getvalue(), dill.detect.baditems(self.variables_to_flush[name]), dill.detect.badobjects(self.variables_to_flush[name]), dill.detect.badtypes(self.variables_to_flush[name]), e, traceback.format_exc(), ) ) v = dill.dumps(None) pickles += v params += "name={}&length={}&".format(name, len(v)) # These next 2 lines are purely for information/debugging self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "< {}: Saved {} bytes".format(name, len(v)) ) total_length += len(v) self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "< Saving {} bytes total...".format(total_length) ) if total_length > 0: while True: r = None try: # self.get_logger(f"{'botengine'}.{__class__.__name__}").info(Color.BOLD + "Flushing: /analytic/variables?{}".format(params) + Color.END) headers = {"Content-Type": "application/octet-stream"} import hashlib md5 = hashlib.md5() md5.update(pickles) headers["Content-MD5"] = md5.digest().hex() r = self._http_post( "/analytic/variables?{}".format(params), data=pickles, headers=headers, timeout=15, ) j = json.loads(r.text) _check_for_errors(j) break except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "flush_binary_variables error: " + str(e) ) if r is not None: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "flush_binary_variables response from server: " + r.text ) self.get_logger(f"{'botengine'}.{__class__.__name__}").info("< Saved.") self.variables_to_flush.clear() def save_shared_variable(self, name, value): """ A shared variable is one that is accessible by other bots within a Location. Examples of shared variables would be rules_engine functions and machine learning models. 
:param name: Name of the shared variable :param value: Value to store """ self.variables.update({name: value}) if self.playback: return import dill data = dill.dumps(value) self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "< {}: Saving {} bytes to shared variable".format(name, len(data)) ) params = {"shared": True} r = self._http_post( "/analytic/variables/{}".format(name), params=params, data=data, headers={"Content-Type": "application/octet-stream"}, timeout=15, ) j = json.loads(r.text) _check_for_errors(j) def load_shared_variable(self, name): """ Load a variable that has been shared by this bot or some other bot within this Location. :param name: Name of the variable to load :return: Variable value, or None if it doesn't exist """ self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_shared_variable() name={}".format(name) ) if name in self.variables: self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine:load_shared_variable() value={}".format(self.variables[name]) ) return self.variables[name] if self.playback: return None if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return None import dill while True: params = {"shared": True} r = self._http_get( "/analytic/variables/" + urllib.parse.quote(str(name)), params=params ) try: return dill.loads(r.content) except EOFError: # Don't show the error because this error will always happen on new bot instances for every variable self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "botengine.load_shared_variable(): EOFError in downloading variable {}".format( name ) ) return None except Exception as e: import traceback self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "BotEngine: load_shared_variable(): Unable to unpickle variable: {}. {}; {}".format( name, str(e), traceback.format_exc() ) ) return None def delete_shared_variable(self, name): """ Delete a variable from the cloud :param name: Name of the variable to delete at the cloud """ params = {"shared": True} self._http_delete( "/analytic/variables/" + urllib.parse.quote(str(name)), params=params ) def destroy_core_memory(self): """ Destructive action to forcefully make the bot forget everything and start over from scratch. This is primarily used when a bot is unpaused after a long time and we want to just start fresh. """ del self.variables[CORE_VARIABLE_NAME] self._reset_core_variable() def _download_core_variables_async(self, event): event.set() self._reset_core_variable() self._download_core_variables() event.clear() def _download_core_variables(self): """ Download and extract the core variables. 
This is to be called exactly once when the BotEngine class begins execution """ self._download_binary_variable(CORE_VARIABLE_NAME) self._reset_core_variable() def _reset_core_variable(self): """ Reset the core variable :return: """ if CORE_VARIABLE_NAME not in self.variables: self.variables[CORE_VARIABLE_NAME] = {} if self.variables[CORE_VARIABLE_NAME] is None: self.variables[CORE_VARIABLE_NAME] = {} if not isinstance(self.variables[CORE_VARIABLE_NAME], dict): self.variables[CORE_VARIABLE_NAME] = {} if TIMERS_VARIABLE_NAME not in self.variables[CORE_VARIABLE_NAME]: self.variables[CORE_VARIABLE_NAME][TIMERS_VARIABLE_NAME] = None if QUESTIONS_VARIABLE_NAME not in self.variables[CORE_VARIABLE_NAME]: self.variables[CORE_VARIABLE_NAME][QUESTIONS_VARIABLE_NAME] = None if COUNT_VARIABLE_NAME not in self.variables[CORE_VARIABLE_NAME]: self.variables[CORE_VARIABLE_NAME][COUNT_VARIABLE_NAME] = 0 def _validate_count(self): """ Validate the count and log an error if our count isn't correct """ if self.count is not None: if self._needs_resync(): error_string = ( "Expected trigger ID " + str(self.variables[CORE_VARIABLE_NAME][COUNT_VARIABLE_NAME] + 1) + " but got trigger ID " + str(self.count) + ". That's " + str( self.count - (self.variables[CORE_VARIABLE_NAME][COUNT_VARIABLE_NAME] + 1) ) + " missed triggers." ) self.get_logger(f"{'botengine'}.{__class__.__name__}").error( error_string ) if "sbox" in self._server: self.notify( email_content=error_string, email_subject="[sbox bot debugging] Missed trigger alert", ) self._save_count() def _needs_resync(self): """ :return: True if we need to resynchronize with the server because our trigger count is off """ return ( self.count > 0 and self.variables[CORE_VARIABLE_NAME][COUNT_VARIABLE_NAME] > 0 and (self.variables[CORE_VARIABLE_NAME][COUNT_VARIABLE_NAME] + 1) != self.count ) def _save_count(self): """ Save the trigger count Called explicitly because it adds computation """ self.save_variable( COUNT_VARIABLE_NAME, self.count, required_for_each_execution=True ) def _download_binary_variable(self, name, shared=False): """ Download a single binary variable """ if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # We don't want to download variables for organization RAG bots return import dill while True: params = {"shared": shared} r = self._http_get( "/analytic/variables/" + urllib.parse.quote(str(name)), params=params ) # Used to debug variables loaded from the server, used in conjunction with debug code in the flush. 
# saved_var = None # if os.path.isfile('{}.variable'.format(name)): # with open('{}.variable'.format(name), 'rb') as f: # saved_var = f.read() # self.get_logger(f"{'botengine'}.{__class__.__name__}").info(Color.GREEN + "{}: Loaded {} bytes".format('{}.variable'.format(name), len(saved_var)) + Color.END) # self.get_logger(f"{'botengine'}.{__class__.__name__}").info(Color.GREEN + "{}: Downloaded {} bytes".format(name, len(r.content)) + Color.END) # # if saved_var is not None: # self.get_logger(f"{'botengine'}.{__class__.__name__}").info(Color.GREEN + "=> {} bytes difference for variable {}".format(abs(len(saved_var) - len(r.content)), name) + Color.END) # if saved_var == r.content: # self.get_logger(f"{'botengine'}.{__class__.__name__}").info(Color.GREEN + "=> Saved content is the same as downloaded content" + Color.END) # else: # self.get_logger(f"{'botengine'}.{__class__.__name__}").error(Color.RED + "=> Saved content is DIFFERENT than downloaded content" + Color.END) try: self.variables[name] = dill.loads(r.content) return except EOFError as e: # Don't show the error because this error will always happen on new bot instances for every variable self.variables[name] = None if r.status_code == 200: # Everything was okay, but our variable was corrupted if name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "BotEngine Core Variable Reset: EOFError in _download_binary_variable: Error message {}".format( str(e) ) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "\tBotEngine Core Variable Reset: HTTP status: {}".format( str(r.status_code) ) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "\tBotEngine Core Variable Reset: Variable text: {}".format( str(r.text) ) ) self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "\tBotEngine Core Variable Reset: Variable content: {}".format( str(r.content) ) ) return elif r.status_code == 202: # No variable content on the server if name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "BotEngine Core Variable Reset: No variable on the server." ) return elif r.status_code == 204: # No variable content on the server if name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "BotEngine Core Variable Reset: No variable on the server." ) return else: # Bad status code, try again. time.sleep(0.5) continue except Exception as e: if r.status_code == 200: import traceback self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "BotEngine: _download_binary_variable(): Unable to unpickle variable: {}. {}; {}".format( name, str(e), traceback.format_exc() ) ) return elif r.status_code == 202: # No variable content on the server if name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "BotEngine Core Variable Reset: No variable on the server." ) return elif r.status_code == 204: # No variable content on the server if name == CORE_VARIABLE_NAME: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "BotEngine Core Variable Reset: No variable on the server." 
) return else: # Bad status code, try again time.sleep(0.5) continue # =========================================================================== # Notifications - push, SMS, email # =========================================================================== def notify( self, push_title=None, push_subtitle=None, push_content=None, push_category=None, push_sound=None, push_sms_fallback_content=None, push_template_filename=None, push_template_model=None, push_info=None, email_subject=None, email_content=None, email_html=False, email_attachments=None, email_template_filename=None, email_template_model=None, email_addresses=None, sms_content=None, sms_template_filename=None, sms_template_model=None, sms_group_chat=True, device_message_device_id=None, device_message_title=None, device_message_text=None, device_message_from=None, device_message_duration=None, device_message_icon=None, device_message_muted=None, device_message_imageUrl=None, device_message_image=None, admin_domain_name=None, brand=None, language=None, user_id=None, user_id_list=None, to_residents=False, to_supporters=False, to_admins=False, device_message=None, ): """ This method sends a push or email notification to the people you selected. :param push_title: (optional) Push notification title (limited to the push notification service maximum message size) :param push_subtitle: (optional) Push notification subtitle (limited to the push notification service maximum message size) (iOS only) :param push_content: (optional) Push notification text (limited to the push notification service maximum message size) :param push_category: (optional) Push notification category (limited to the push notification service maximum message size) (iOS only) :param push_sound: (optional) Eg: "sound.wav" :param push_sms_fallback_content: Message content to deliver over SMS in case the push notification delivery fails :param push_template_filename: directoryName/PushTemplateName.vm. If this is used, the 'push_content' field is ignored. :param push_template_model: Dictionary of key/value pairs to inject into the push template. Dependent upon what the template itself understands. :param push_info: Extra key/value pairs in the push notification :param email_subject: (optional) Email subject line :param email_content: (optional) Email body :param email_html: (optional) True or False; default is False. :param email_template_filename: directoryName/EmailTemplateName.vm. If this is used, the 'email_content' and 'email_subject' fields are ignored. :param email_template_model: Dictionary of key/value pairs to inject into the email template. Dependent upon what the template itself understands. :param email_addresses: List of email addresses to deliver the message to. Your bot must support category 1 ("Specified Emails") email message delivery. :param sms_content: Content for an SMS message :param sms_template_filename: SMS template filename. If this is used, the 'sms_content' field is ignored :param sms_template_model: Dictionary of key/value pairs to inject into the sms template. Dependent upon what the template itself understands. :param sms_group_chat: True to send SMS messages as a group chat message instead of one-on-one individual messages. :param device_message_device_id: (optional) Device ID", :param device_message_title: Message title :param device_message_text: Message text :param device_message_from: (optional) From name :param device_message_duration: (optional) Message duration. default '60'. 
        :param device_message_icon: (optional) Icon name
        :param device_message_muted: (optional) Mute the message. default 'false'.
        :param device_message_imageUrl: (optional) Image URL
        :param device_message_image: (optional) base64-encoded image
        :param brand: Case-sensitive brand for templates
        :param language: Language, for example 'en'
        :param user_id: (optional) Specific user ID to send to if the bot is running at the organizational level.
        :param user_id_list: (optional) Specify a list of user IDs to send to if the bot is running at the organizational level.
        :param to_residents: True to send the message to residents.
        :param to_supporters: True to send the message to supporters.
        :param to_admins: True to send the message to admins (email).
        :param admin_domain_name: Domain name / "short name" of the organization to send a notification to the admins
        """
        if to_admins:
            import traceback

            self.get_logger(f"{'botengine'}.{__class__.__name__}").error(
                "botengine: notify() called with to_admins=True, which is no longer supported. Please update your code: {}".format(
                    traceback.format_stack()
                )
            )

        notifications = {}

        user_declared = False
        if to_residents:
            if "userCategories" not in notifications:
                notifications["userCategories"] = []
            user_declared = True
            notifications["userCategories"].append(1)

        if to_supporters:
            if "userCategories" not in notifications:
                notifications["userCategories"] = []
            user_declared = True
            notifications["userCategories"].append(2)

        if user_id is not None:
            if "users" not in notifications:
                notifications["users"] = []
            user_declared = True
            notifications["users"].append(user_id)

        if user_id_list is not None:
            if "users" not in notifications:
                notifications["users"] = []
            user_declared = True
            notifications["users"] += user_id_list

        if email_addresses is not None:
            if len(email_addresses) > 0:
                user_declared = True

        if not user_declared:
            if "userCategories" not in notifications:
                notifications["userCategories"] = []
            notifications["userCategories"].append(1)

        if brand is not None:
            notifications["brand"] = brand

        if language is not None:
            notifications["language"] = language

        send = False

        # The notification type is included in a push notification to the app to let the app know how to render it.
        # It is also included in the GET Notifications API available to apps, which I don't believe are being used anymore.
        # Type 4 says, "This message came from the bot."
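        # An illustrative (hypothetical) payload assembled by the code below might look like:
        #   {"type": 4, "userCategories": [1], "pushMessage": {"title": "Water Leak", "content": "Leak detected in the kitchen."}}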
notifications["type"] = 4 if sms_content is not None or sms_template_filename is not None: send = True notifications["smsMessage"] = {} notifications["smsMessage"]["individual"] = not sms_group_chat if sms_content: notifications["smsMessage"]["content"] = sms_content if sms_template_filename: notifications["smsMessage"]["template"] = sms_template_filename if sms_template_model: notifications["smsMessage"]["model"] = sms_template_model if ( push_content is not None or push_template_filename is not None or push_info is not None ): send = True notifications["pushMessage"] = {} if push_sms_fallback_content is not None: notifications["pushMessage"]["smsContent"] = push_sms_fallback_content if push_title is not None: notifications["pushMessage"]["title"] = push_title if push_subtitle is not None: notifications["pushMessage"]["subtitle"] = push_subtitle if push_content is not None: notifications["pushMessage"]["content"] = push_content if push_category is not None: notifications["pushMessage"]["category"] = push_category if push_sound is not None: notifications["pushMessage"]["sound"] = push_sound if push_template_filename is not None: notifications["pushMessage"]["template"] = push_template_filename if push_template_model is not None: notifications["pushMessage"]["model"] = push_template_model if push_info is not None: notifications["pushMessage"]["info"] = push_info if email_content is not None or email_template_filename is not None: send = True notifications["emailMessage"] = {} notifications["emailMessage"]["html"] = email_html if email_subject is not None: notifications["emailMessage"]["subject"] = email_subject if email_content is not None: notifications["emailMessage"]["content"] = email_content if email_template_filename is not None: notifications["emailMessage"]["template"] = email_template_filename if email_template_model is not None: notifications["emailMessage"]["model"] = email_template_model if email_attachments is not None: notifications["emailMessage"]["attachments"] = email_attachments if email_addresses is not None: notifications["emailMessage"]["recipients"] = email_addresses if device_message_text is not None: send = True notifications["deviceMessage"] = {} notifications["deviceMessage"]["text"] = device_message_text notifications["deviceMessage"]["duration"] = device_message_duration or 60 if device_message_device_id is not None: notifications["deviceMessage"]["deviceId"] = device_message_device_id if device_message_title is not None: notifications["deviceMessage"]["title"] = device_message_title if device_message_from is not None: notifications["deviceMessage"]["from"] = device_message_from if device_message_icon is not None: notifications["deviceMessage"]["icon"] = device_message_icon if device_message_muted is not None: notifications["deviceMessage"]["muted"] = device_message_muted if device_message_imageUrl is not None: notifications["deviceMessage"]["imageUrl"] = device_message_imageUrl if device_message_image is not None: notifications["deviceMessage"]["image"] = device_message_image if send: j = json.dumps(notifications) self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Notification content: {}".format( json.dumps(notifications, sort_keys=True) ) ) location_id = self.get_location_id() # Location Notification API # https://iotbots.docs.apiary.io/#reference/bot-server-apis/send-notification-to-location-users/send-a-notification r = self._http_post( "/analytic/location/{}/notifications".format(location_id), data=j ) j = json.loads(r.text) # 
======================================================================= # print("Params: " + str(params)) # print("Data: " + str(j)) # self.get_logger(f"{'botengine'}.{__class__.__name__}").info("RESPONSE: " + str(r.text)) # ======================================================================= # This was barfing on us when sending a push notification to the end user in a bot that requests access to an organization when the user is not part of any organization try: _check_for_errors(j) except BotError: # _bot_loggers["botengine"].error("BotEngine SMS notify(): " + e.msg) # _bot_loggers["botengine"].error("Notification data: " + str(notifications)) return def send_mms( self, user_id, media_content=None, url=None, media_type=1, ext=None, caption=None, ): """ Send an image or audio file to the user's phone. :param user_id: User ID :param media_content: File content :param url: Image url :param media_type: Media type (1 - image, 2 - audio) :param ext: Ext :param caption: Additional text to accompany the media file. :return: """ headers = {} params = {"userId": user_id, "mediaType": media_type} if caption is not None: params["caption"] = caption if ext is not None: params["ext"] = ext if url is not None: content = "application/octet-stream" headers = {"Content-Type": content} params["url"] = url r = self._http_post("/analytic/mms", params=params, headers=headers) j = json.loads(r.text) _check_for_errors(j) else: content = "" if media_type == 1: content = "image/" + ext else: content = "application/octet-stream" headers = {"Content-Type": content} r = self._http_post( "/analytic/mms", params=params, data=media_content, headers=headers ) j = json.loads(r.text) _check_for_errors(j) def make_voice_call(self, user_id, voice_model, call_time=None): """ Define the voice call model for specific user. :param user_id: User ID :param voice_model: Voice call model :param call_time: Call start time, in milliseconds since the epoch :return: """ body = {"model": voice_model} params = {"userId": user_id} if call_time is not None: params["callTime"] = call_time self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine: make_voice_call() params={} body={}".format( json.dumps(params), json.dumps(body) ) ) if self.playback: return r = self._http_post("/analytic/voiceCall", params=params, data=json.dumps(body)) j = json.loads(r.text) _check_for_errors(j) def set_incoming_voicecall(self, user_id, voice_model): """ Define the incoming voice call model for specific users at the bot's location. :param user_id: :param voice_model: Voice call model :return: """ body = {"model": voice_model} params = {"userId": user_id} self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "botengine: set_incoming_voicecall() params={} body={}".format( json.dumps(params), json.dumps(body) ) ) r = self._http_post( "/analytic/voiceCallAnswer", params=params, data=json.dumps(body) ) j = json.loads(r.text) _check_for_errors(j) def delete_incoming_voicecall(self, user_id): """ Delete the incoming voice call model for specific users at the bot's location. 
        :param user_id: User ID
        :return:
        """
        params = {"userId": user_id}
        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "botengine: delete_incoming_voicecall() params={}".format(
                json.dumps(params)
            )
        )
        r = self._http_delete("/analytic/voiceCallAnswer", params=params)
        j = json.loads(r.text)
        _check_for_errors(j)

    def email_admins(
        self,
        email_subject=None,
        email_content=None,
        email_html=False,
        email_attachments=None,
        email_template_filename=None,
        email_template_model=None,
        email_addresses=None,
        brand=None,
        categories=None,
    ):
        """
        Send an email to administrators

        :param email_subject: Email subject line
        :param email_content: Email body
        :param email_html: True if the email body is HTML; default is False
        :param email_attachments: List of attachments, as created by add_email_attachment()
        :param email_template_filename: Email template filename. If this is used, the 'email_content' and 'email_subject' fields are ignored.
        :param email_template_model: Dictionary of key/value pairs to inject into the email template
        :param email_addresses: List of email addresses
        :param brand: Case-sensitive brand for templates
        :param categories: List of Organization User Categories; 1 = Manager, 2 = Technician, 3 = Billing. Default is [1, 2].
        :return:
        """
        if categories is None:
            # Avoid a mutable default argument
            categories = [1, 2]

        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            ">email_admins() email_subject={} email_content={} email_html={} email_attachments={} email_template_filename={} email_template_model={} email_addresses={} brand={} categories={}".format(
                email_subject,
                email_content,
                email_html,
                email_attachments,
                email_template_filename,
                email_template_model,
                email_addresses,
                brand,
                categories,
            )
        )
        body = {}
        params = {}
        if brand is not None:
            body["brand"] = brand

        if email_content is not None or email_template_filename is not None:
            body["emailMessage"] = {}
            body["emailMessage"]["html"] = email_html
            if email_subject is not None:
                body["emailMessage"]["subject"] = email_subject
            if email_content is not None:
                body["emailMessage"]["content"] = email_content
            if email_template_filename is not None:
                body["emailMessage"]["template"] = email_template_filename
            if email_template_model is not None:
                body["emailMessage"]["model"] = email_template_model
            if email_attachments is not None:
                body["emailMessage"]["attachments"] = email_attachments
            if email_addresses is not None:
                # Email to selected recipients
                body["emailMessage"]["recipients"] = email_addresses

                # Notify emails directly
                params = {"category": 1}
            else:
                body["userCategories"] = categories

                # Notify Organization Users
                params = {"category": 2}
        else:
            return

        if self.playback:
            self.get_logger(f"{'botengine'}.{__class__.__name__}").info(
                "email_admins() body={}".format(json.dumps(body, sort_keys=True))
            )
            return

        r = self._http_post(
            "/analytic/notifications", params=params, data=json.dumps(body)
        )
        j = json.loads(r.text)
        _check_for_errors(j)

    def add_email_attachment_from_device_file(
        self, destination_attachment_array, device_file_id, content_id
    ):
        """
        This helper method will create an email attachment block from a file that was previously uploaded by a device,
        and add it to a destination array of existing email attachments

        :param destination_attachment_array: Destination array of attachments. Pass in [] if you are starting a new list of attachments.
        :param device_file_id: File ID of a file that was previously uploaded by a device
        :param content_id: Unique ID for the content. The email can use <img src="cid:contentId"> to reference this attachment.
        """
        attachment = {"deviceFileId": device_file_id, "contentId": content_id}
        destination_attachment_array.append(attachment)
        return destination_attachment_array

    def add_email_attachment(
        self, destination_attachment_array, filename, content, content_type, content_id
    ):
        """
        This helper method will create an email attachment block and add it to a destination array of existing email attachments

        :param destination_attachment_array: Destination array of attachments. Pass in [] if you are starting a new list of attachments.
        :param filename: Filename of the file, for example, "imageName.jpg"
        :param content: Content to attach, for example base64-encoded binary image content
        :param content_type: Content type of the file, for example "image/jpeg"
        :param content_id: Unique ID for the content, for example "inlineImageId". The email can reference this content with <img src="cid:inlineImageId">.
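
        Example (illustrative; the base64 content variable is hypothetical):

            attachments = botengine.add_email_attachment(
                [], "snapshot.jpg", base64_jpeg_string, "image/jpeg", "inlineImageId"
            )
            botengine.notify(
                email_subject="Camera snapshot",
                email_content='<img src="cid:inlineImageId">',
                email_html=True,
                email_attachments=attachments,
            )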
        :return: the destination_attachment_array with the new attachment, ready to pass into the email_attachments argument in the notify() method
        """
        attachment = {
            "name": filename,
            "content": content,
            "contentType": content_type,
            "contentId": content_id,
        }
        destination_attachment_array.append(attachment)
        return destination_attachment_array

    # ===========================================================================
    # Measurements
    # ===========================================================================
    def get_measurements(
        self,
        device_id,
        user_id=None,
        oldest_timestamp_ms=None,
        newest_timestamp_ms=None,
        param_name=None,
        index=None,
        last_rows=None,
    ):
        """
        This method will return measurements from the given device

        * param_name --> get the current measurement(s)
        * oldest_timestamp_ms, newest_timestamp_ms[?], param_name[?], index[?] --> get historical measurements
        * oldest_timestamp_ms, newest_timestamp_ms[?], param_name[?], index[?], last_rows --> get the last N measurements

        :param device_id: Device ID to extract parameters from
        :param user_id: User ID to access devices of a specific user by an organization bot
        :param oldest_timestamp_ms: Start time in milliseconds to begin receiving measurements. If not set, only the latest measurements will be returned. e.g. 1483246800000
        :param newest_timestamp_ms: End time in milliseconds to stop receiving measurements; default is the current time. e.g. 1483246800000
        :param param_name: Only obtain measurements for the given parameter names. Multiple values can be passed, for example "batteryLevel" or ["batteryLevel", "doorStatus"]
        :param index: Only obtain measurements for parameters with this index number.
        :param last_rows: Receive only the last N measurements
        """
        if newest_timestamp_ms is None:
            newest_timestamp_ms = self.get_timestamp()

        original_oldest_timestamp_ms = oldest_timestamp_ms

        params = {}
        if user_id:
            params["userId"] = int(user_id)

        if param_name:
            params["paramName"] = param_name

        if index:
            params["index"] = index

        if last_rows:
            params["lastRows"] = last_rows

        if newest_timestamp_ms:
            params["endDate"] = int(newest_timestamp_ms)

        if oldest_timestamp_ms is None:
            r = self._http_get(
                "/analytic/devices/" + device_id + "/parameters",
                params=params,
                timeout=120,
            )
            j = json.loads(r.text)
            _check_for_errors(j)
            return j

        else:
            # Extract the data from the server in calendar month API chunks.
            # The parameters history table has monthly partitions.
            # This operation works the fastest when you select data from exactly the first microsecond of one month to the first microsecond of the next month in UTC.
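            # For example (illustrative dates): a request spanning Jan 15 - Mar 20 is downloaded in
            # partition-aligned chunks, newest first: [Mar 1..Mar 20], [Feb 1..Mar 1], [Jan 15..Feb 1],
            # and each chunk is inserted at the front of the aggregated "measures" list below.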
import dateutil.relativedelta # print("get_measurements(): original_oldest_timestamp_ms={}; newest_timestamp_ms={}".format(original_oldest_timestamp_ms, newest_timestamp_ms)) oldest_dt = datetime.datetime.utcfromtimestamp( newest_timestamp_ms / 1000 ).replace(day=1, hour=0, minute=0, second=0, microsecond=0) return_json = {"measures": []} while newest_timestamp_ms > original_oldest_timestamp_ms: oldest_timestamp_ms = ( (oldest_dt - datetime.datetime(1970, 1, 1)).total_seconds() ) * 1000 if oldest_timestamp_ms < original_oldest_timestamp_ms: oldest_timestamp_ms = original_oldest_timestamp_ms params["startDate"] = int(oldest_timestamp_ms) params["endDate"] = int(newest_timestamp_ms) # print("get_measurements(): start={} end={}".format(int(oldest_timestamp_ms), int(newest_timestamp_ms))) r = self._http_get( "/analytic/devices/" + device_id + "/parameters", params=params, timeout=240, ) j = json.loads(r.text) _check_for_errors(j) if "measures" not in j: # Ran out of measurements break elif len(j["measures"]) == 0: # Ran out of measurements break # Insert the most recently downloaded set of measurements into the front of our total array return_json["measures"][0:0] = j["measures"] # Inch our way backwards to the previous calendar month newest_timestamp_ms = oldest_timestamp_ms oldest_dt = oldest_dt + dateutil.relativedelta.relativedelta(months=-1) return return_json def request_data( self, type=1, device_id=None, oldest_timestamp_ms=None, newest_timestamp_ms=None, param_name_list=None, reference=None, index=None, ordered=1, ): """ Selecting a large amount of data from the database can take a significant amount of time and impact server performance. To avoid this long waiting period while executing bots, a bot can submit a request for all the data it wants from this location asynchronously. The server gathers all the data on its own time, and then triggers the bot with trigger 2048. Your bot must include trigger 2048 to receive the trigger. Selected data becomes available as a file in CSV format, compressed by LZ4, and stored for one day. The bot receives direct access to this file. You can call this multiple times to extract data out of multiple devices. The request will be queued up and the complete set of requests will be flushed at the end of this bot execution. :param type: DATA_REQUEST_TYPE_*, default (1) is key/value device parameters :param device_id: Device ID to download historical data from :param oldest_timestamp_ms: Oldest timestamp in milliseconds :param newest_timestamp_ms: Newest timestamp in milliseconds :param param_name_list: List of parameter names to download :param reference: Reference so when this returns we know who it's for :param index: Index to download when parameters are available with multiple indices :param ordered: 1=Ascending (default); -1=Descending. """ self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Requesting data from {} to {} for device {} with param_name_list={}".format( oldest_timestamp_ms, newest_timestamp_ms, device_id, param_name_list ) ) request = {"type": type} if device_id is not None: request["deviceId"] = device_id if oldest_timestamp_ms is not None: request["startTime"] = oldest_timestamp_ms else: # Go back a maximum 1 millisecond less than a year ago. 
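            # 31535999999 ms = (365 days * 86,400 seconds * 1,000 ms) - 1 ms,
            # i.e. one millisecond less than 365 days.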
request["startTime"] = self.get_timestamp() - 31535999999 if newest_timestamp_ms is not None: request["endTime"] = newest_timestamp_ms else: request["endTime"] = self.get_timestamp() if param_name_list is not None: request["paramNames"] = param_name_list if reference is not None: request["key"] = reference if index is not None: request["index"] = index if ordered is not None: request["ordered"] = ordered self.data_requests.append(request) def flush_asynchronous_requests(self): """ Flush the complete set of asynchronous measurement requests to the server """ if len(self.data_requests) == 0: return if self.playback: # Data requests are inserted into playback queue as single trigger after to the current execution return j = json.dumps({"dataRequests": self.data_requests}) r = self._http_post("/analytic/dataRequests", data=j) j = json.loads(r.text) try: _check_for_errors(j) except BotError: _bot_loggers["botengine"].warn( "Error sending asynchronous measurement requests. We sent this body: {}".format( json.dumps({"dataRequests": self.data_requests}, sort_keys=True) ) ) self.data_requests = [] def send_data_request(self, url, timeout, stream=False): """ Flush the complete set of asynchronous measurement requests to the server """ while True: try: r = self.session.get(url, timeout=timeout, stream=stream) return r except self._requests.HTTPError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP error calling GET " + url ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) except self._requests.ConnectionError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Connection HTTP error calling GET " + url ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) except self._requests.Timeout as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( str(timeout) + " second HTTP Timeout calling POST " + url ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) timeout += 10 if timeout >= 30: raise self._requests.Timeout() except self._requests.TooManyRedirects as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Too many redirects HTTP error calling GET " + url ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Generic HTTP exception calling GET " + url ) self.get_logger(f"{'botengine'}.{__class__.__name__}").debug( "Error: {}".format(e) ) # =========================================================================== # Device Properties # =========================================================================== def set_device_property(self, device_id, name, value, index=None): """ Set a single device property from your location https://iotapps.docs.apiary.io/#reference/devices/device-activation-info/set-device-properties :param device_id: Device ID :param properties: Device properties {"property": [{"name":"size", "value":"10"}, {xxx}]} """ params = {"locationId": self.get_location_id()} property = {"name": name, "value": value} if index is not None: property["index"] = index body = {"property": [property]} self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Saving device property to {}: \n{}".format( device_id, json.dumps(body, sort_keys=True) ) ) self._http_post( "/cloud/json/devices/{}/properties".format(device_id), params=params, data=json.dumps(body), ) def get_device_property(self, device_id, 
name=None, index=None): """ Get device properties from your location https://iotapps.docs.apiary.io/#reference/devices/device-properties/get-device-properties :param device_id: Device ID :param name: Optional name to search for :param index: Optional index to search for """ params = {"locationId": self.get_location_id()} if name is not None: params["name"] = name if index is not None: params["index"] = index r = self._http_get( "/cloud/json/devices/{}/properties".format(device_id), params=params ) j = json.loads(r.text) _check_for_errors(j) if "properties" in j: return j["properties"] return [] def delete_device_property(self, device_id, name, index=None): """ Delete device properties from your location https://iotapps.docs.apiary.io/#reference/devices/device-properties/get-device-properties :param device_id: Device ID :param property_name: Property name """ params = {"locationId": self.get_location_id(), "name": name} if index is not None: params["index"] = index self._http_delete( "/cloud/json/devices/{}/properties".format(device_id), params=params ) # =========================================================================== # Commands # =========================================================================== def send_command( self, device_id, param_name, value, index=None, command_timeout_ms=None, comment=None, ): """ This method sends a command to the device ID :param device_id: The exact device ID to send a command to. This is case-sensitive. :param param_name: The name of the parameter to configure. :param value: The value to set for this parameter. :param index: Optional index number / letters. Default is None. :param comment: Reason why this command was sent """ self.send_commands( device_id, self.form_command(param_name=param_name, index=index, value=value), command_timeout_ms=command_timeout_ms, comment=comment, ) def form_command(self, param_name, value, index=None): """ This method will form a single command. You can pass in parameter name / optional index / value pairs and it will generate a dictionary to represent this command. This is a shortcut to send multiple commands with the send_commands(device_id, commands) method. Append multiple commands into a list and then use send_commands(..) to send them all. :param param_name: The name of the parameter to configure. :param value: The value to set for this parameter. :param index: Optional index number / letters. Default is None. """ response = {"name": param_name, "value": value} if index: response["index"] = index return response def send_commands(self, device_id, commands, command_timeout_ms=None, comment=None): """ This method sends one or multiple commands simultaneously to the given device ID 'index' is optional - if your parameter does not use an index number, do not reference or populate it. :param device_id: The exact device ID to send a command to. This is case-sensitive. :param commands: Array of dictionaries of the form [{"name":"parameterName", "index":0, "value":"parameterValue"}, ...] 
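
        Example (illustrative; "deviceXYZ" and the parameter names are hypothetical):

            commands = [
                botengine.form_command("ledPower", "1"),
                botengine.form_command("brightness", "75", index="0"),
            ]
            botengine.send_commands("deviceXYZ", commands)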
:param command_timeout_ms: Relative timeout, in ms, to expire the command :param comment: Reason why this command was sent """ commands_for_device = {"deviceId": device_id} exists = False for d in self.commands_to_flush: if d["deviceId"] == device_id: commands_for_device = d exists = True break if "params" not in commands_for_device: commands_for_device["params"] = [] if command_timeout_ms is not None: commands_for_device["commandTimeout"] = command_timeout_ms if comment is not None: commands_for_device["comment"] = comment if not isinstance(commands, list): commands = [commands] import copy for command in commands: for param in copy.copy(commands_for_device["params"]): if param["name"] == command["name"]: if "index" in param and "index" in command: if param["index"] == command["index"]: # match commands_for_device["params"].remove(param) break else: # match commands_for_device["params"].remove(param) break commands_for_device["params"].append(command) if not exists: self.commands_to_flush.append(commands_for_device) def cancel_command(self, device_id, param_name=None): """ Cancel a command to the device with the given parameter names. If no parameter name is given, this will cancel all commands to the device. :param device_id: Device ID to cancel commands for :param param_name: Parameter name to cancel commands for. Leave this None (default) to cancel all commands to the device. """ import copy for d in copy.copy(self.commands_to_flush): if d["deviceId"] == device_id: if param_name is None: # Delete all commands to this device self.commands_to_flush.remove(d) return else: if "params" in d: for param in copy.copy(d["params"]): if param["name"] == param_name: d["params"].remove(param) if len(d["params"]) == 0: self.commands_to_flush.remove(d) def flush_commands(self): """ https://iotbots.docs.apiary.io/#reference/bot-server-apis/multiple-device-commands/send-set-commands Flush all the commands to the server and execute them. This is called automatically when the bot exits, you should never have to call this manually. """ if len(self.commands_to_flush) > 0: body = {"devices": []} for d in self.commands_to_flush: body["devices"].append(d) _bot_loggers["botengine"].info( "Sending commands: " + json.dumps(body, sort_keys=True) ) r = self._http_put("/analytic/parameters", data=json.dumps(body)) j = json.loads(r.text) _bot_loggers["botengine"].info( "Command responses: " + json.dumps(j, sort_keys=True) ) self.commands_to_flush = [] # =========================================================================== # Modes # =========================================================================== def set_mode(self, location_id, mode, comment=None): """ Set the mode :param location_id: Location ID of which to set the mode :param mode: Mode string to set, for example "AWAY" or "AWAY.SILENT" :param comment: Optional comment to describe why this mode changed. 
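
        Example (illustrative):

            botengine.set_mode(botengine.get_location_id(), "AWAY", comment="Everyone left home")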
""" data = None if comment is not None: data = json.dumps({"comment": comment}) self._http_post( "/cloud/json/location/" + str(location_id) + "/event/" + str(mode), data=data, ) def get_mode_history( self, location_id, oldest_timestamp_ms=None, newest_timestamp_ms=None ): """ This method will return location mode history in backward order (lastest first) Including the source of the mode change :param location_id: Location ID :param oldest_timestamp_ms: Oldest timestamp to start pulling history :param newest_timestamp_ms: Newest timestamp to stop pulling history """ params = {} if oldest_timestamp_ms is not None: params["startDate"] = int(oldest_timestamp_ms) if newest_timestamp_ms is not None: params["endDate"] = int(newest_timestamp_ms) r = self._http_get( "/analytic/location/" + str(location_id) + "/events", params=params ) j = json.loads(r.text) _check_for_errors(j) return j def get_mode(self, location_id): """ Get the current mode :param location_id: Location ID to retrieve the mode for :return: The current mode, or "HOME.DEFAULT" by default if the location can't be found """ location = self.get_location_info() if location is not None: if "location" in location: if "event" in location["location"]: return location["location"]["event"] return "HOME.DEFAULT" # =========================================================================== # Files # =========================================================================== def download_file(self, file_id, local_filename, thumbnail=False): """ Download a file :param file_id: File ID to download :param local_filename: Local filename to store the file into :param thumbnail: True to download the thumbnail for this file :return: local_filename """ params = {"thumbnail": thumbnail} r = self._http_get( "/cloud/json/files/{}".format(file_id), params=params, stream=True ) with open(local_filename, "wb") as f: for chunk in r.iter_content(chunk_size=1024): # filter out keep-alive new chunks if chunk: f.write(chunk) return local_filename # =========================================================================== # Subscription Services # =========================================================================== def has_subscription(self, name): """ Search for the given name inside our list of available. For example if we search for the name "Avantguard" and the location has a subscription "Avantguard.PPC" then this will return True. 
:return: True if this location has the given name String found inside any available subscription """ # ===================================================================== # Example of self.services content: # self.services = [ # { # "amount": 1, # "endDateMs": 1495174080000, # "serviceName": "ProEnergy", # "startDateMs": 1494569299000 # } # ] # ===================================================================== if name is not None and self.services is not None: for service in self.services: if name in service["serviceName"]: return True return False # =========================================================================== # Professional Monitoring Services # =========================================================================== def has_professional_monitoring(self): """ :return: True if this user has professional monitoring services """ try: professional_monitoring = self.professional_monitoring_status() return ( professional_monitoring["callCenter"]["status"] == BotEngine.PROFESSIONAL_MONITORING_REGISTERED ) except Exception as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "Error checking professional monitoring status: {}".format(e) ) return False return False def professional_monitoring_status(self): """ :return: call center service statuses """ if self.playback: status = { "callCenter": { "alertDateMs": 0, "alertStatus": 0, "alertStatusDateMs": 0, "status": 3, } } return status r = self._http_get("/analytic/callCenter") j = json.loads(r.text) _check_for_errors(j) return j def professional_monitoring_alerts(self): """ :return: call center service alerts """ r = self._http_get("/analytic/callCenterAlerts") j = json.loads(r.text) _check_for_errors(j) return j def raise_professional_monitoring_alert( self, message, code, device_id=None, latitude=None, longitude=None ): """ Raise an alert to the professional monitoring services :param message: signal message :param code: E130 - General burglary alarm E131 - Perimeter alarm (door/window entry sensor) E132 - Interior alarm (motion sensor) E134 - Entry/exit alarm (more specific than E131, but I'm not sure we can declare that our door/window sensors will always be used on an entry/exit) E154 - Water leak E111 - Smoke alarm (future) E114 - Heat alarm (future - analytics on temperature sensors above the stove, for example) E158 - Environmental high temperature alarm (temperature sensor) E159 - Environmental low temperature alarm (temperature sensor) E100 - General medical alarm (future - Personal Emergency Reporting System (PERS) button) E108 - Verify contact information :param device_id: device ID :param latitude: Optional latitude for mobile events :param longitude: Optional longitude for mobile events """ return self._signal_professional_monitoring( 1, message, code, device_id, latitude=latitude, longitude=longitude ) def cancel_professional_monitoring_alert(self, message, code, device_id=None): """ Cancel an alert to the professional monitoring services :param message: signal message :param code: E130 - General burglary alarm E131 - Perimeter alarm (door/window entry sensor) E132 - Interior alarm (motion sensor) E134 - Entry/exit alarm (more specific than E131, but I'm not sure we can declare that our door/window sensors will always be used on an entry/exit) E154 - Water leak E111 - Smoke alarm (future) E114 - Heat alarm (future - analytics on temperature sensors above the stove, for example) E158 - Environmental high temperature alarm (temperature sensor) E159 - Environmental low temperature alarm (temperature 
sensor) E100 - General medical alarm (future - Personal Emergency Reporting System (PERS) button) :param device_id: device ID """ return self._signal_professional_monitoring(2, message, code, device_id) def _signal_professional_monitoring( self, alert_status, message, code, device_id=None, latitude=None, longitude=None ): """ This method can change the current alert status to raise or cancel it :param alert_status: 0 An alert was never raised 1 Raise an alert 2 Cancel an alert 3 Not available to set - The alert was reported to the professional monitoring services :param message: signal message :param code: E130 - General burglary alarm E131 - Perimeter alarm (door/window entry sensor) E132 - Interior alarm (motion sensor) E134 - Entry/exit alarm (more specific than E131, but I'm not sure we can declare that our door/window sensors will always be used on an entry/exit) E154 - Water leak E111 - Smoke alarm (future) E114 - Heat alarm (future - analytics on temperature sensors above the stove, for example) E158 - Environmental high temperature alarm (temperature sensor) E159 - Environmental low temperature alarm (temperature sensor) E100 - General medical alarm (future - Personal Emergency Reporting System (PERS) button) :param device_id: device ID :param latitude: Optional latitude for mobile events :param longitude: Optional longitude for mobile events """ body = {"alertStatus": alert_status, "signalMessage": message} if code: body["signalType"] = code if device_id: body["deviceId"] = device_id if latitude is not None: body["latitude"] = str(latitude) if longitude is not None: body["longitude"] = str(longitude) r = self._http_put( "/analytic/callCenter", data=json.dumps({"callCenter": body}) ) j = json.loads(r.text) return j # =========================================================================== # Tags # =========================================================================== def tag_user(self, tag, user_id=None): """ Tag a user :param tag: The tag to give the user """ self._tag(1, tag, user_id) def tag_location(self, tag): """ Tag a location :param tag: The tag to give the location :param location_id: The location ID to tag """ self._tag(2, tag, self.get_location_id()) def tag_device(self, tag, device_id, user_id=None): """ Tag a device :param tag: The tag to give the device :param device_id: The device ID to tag """ self._tag(3, tag, device_id, user_id) def tag_file(self, tag, file_id, user_id=None): """ Tag a file :param tag: The tag to give the file :param file_id: The file ID to tag """ self._tag(4, tag, file_id, user_id) def delete_user_tag(self, tag, user_id=None): """ Delete a user tag :param tag: Tag to delete """ self._delete_tag(1, tag) def delete_location_tag(self, tag): """ Delete a location tag :param tag: Tag to delete """ self._delete_tag(2, tag, self.get_location_id()) def delete_device_tag(self, tag, device_id): """ Delete a location device :param tag: Tag to delete """ self._delete_tag(3, tag, device_id) def delete_file_tag(self, tag, file_id): """ Delete a location file :param tag: Tag to delete """ self._delete_tag(4, tag, file_id) def get_location_tags(self): """ Get Location tags :return: """ tags = self.get_tags(tag_type=self.TAG_TYPE_LOCATIONS) tag_list = [] for tag_object in tags: tag_list.append(tag_object["tag"]) return tag_list def get_tags(self, tag_type=None, tag_id=None, user_id=None): """ Get tags :param tag_type: Optional, filter by type: 1 - Users 2 - Locations 3 - Devices 4 - Files :param tag_id: Optional, filter by location ID, device ID, or file 
ID :param user_id: Used with Organizational Apps - confine tags to a specific user """ if self.playback: return [] params = {} if user_id is not None: params["userId"] = user_id if tag_type is not None: params["type"] = tag_type if tag_id is not None: params["id"] = tag_id r = self._http_get("/analytic/tags", params=params) j = json.loads(r.text) _check_for_errors(j) if "tags" in j: return j["tags"] return [] def _tag(self, tag_type, tag, tag_id=None, user_id=None): """Private method to tag users, devices, locations, files :param tag_type: 1 - User 2 - Location 3 - Device 4 - Files :param tag: Tag to give the object :param tag_id: Location ID, Device ID, or File ID """ if " " in tag: return "Error: Tags cannot have any spaces" if "#" in tag: return "Error: Tags cannot have any # signs" if "@" in tag: return "Error: Tags cannot have any @ signs" tag_block = {"type": tag_type, "tag": tag} if tag_id is not None: tag_block["id"] = tag_id if user_id is not None: if user_id not in self.tags_to_create_by_user: self.tags_to_create_by_user[user_id] = [] self.tags_to_create_by_user[user_id].append(tag_block) else: self.tags_to_create.append(tag_block) def _delete_tag(self, tag_type, tag, tag_id=None, user_id=None): """Delete a tag :param tag_type: 1 - User 2 - Location 3 - Device 4 - Files :param tag: Tag to delete :param tag_id: Location ID, Device ID, or File ID """ tag_block = {"type": tag_type, "tag": tag} if tag_id is not None: tag_block["id"] = tag_id if user_id is not None: if user_id not in self.tags_to_delete_by_user: self.tags_to_delete_by_user[user_id] = [] self.tags_to_delete_by_user[user_id].append(tag_block) else: self.tags_to_delete.append(tag_block) def flush_tags(self): """ Flush the new and deleted tags to the server, This is called automatically when the bot is finished executing. It should never have to be called manually. """ # Create tags - single API call if len(self.tags_to_create) > 0: j = json.dumps({"tags": self.tags_to_create}) r = self._http_put("/analytic/tags", data=j) j = json.loads(r.text) try: _check_for_errors(j) except BotError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( e.msg + "; data=" + str(j) ) # Delete tags - individual API calls for params in self.tags_to_delete: r = self._http_delete("/analytic/tags", params=params) j = json.loads(r.text) # Do not check for errors, because most errors are likely the # admin manually deleting a tag and we don't want to kill the bot. # Organizational tags to create - individual API calls for each user for user_id in self.tags_to_create_by_user: params = {"userId": user_id} j = json.dumps({"tags": self.tags_to_create_by_user[user_id]}) r = self._http_put("/analytic/tags", params=params, data=j) j = json.loads(r.text) try: _check_for_errors(j) except BotError as e: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( e.msg + "; user_id=" + str(user_id) + "; data=" + str(j) ) # Organizational tags to delete - individual API calls for each tag for user_id in self.tags_to_delete_by_user: for params in self.tags_to_delete_by_user[user_id]: params["userId"] = user_id r = self._http_delete("/analytic/tags", params=params) # =========================================================================== # Location Settings # =========================================================================== def set_location_priority(self, category, rank, comment=None): """ Set the prioritized category of human attention needed for this location. 0 = This location has no devices. 
1 = Everything is running okay 2 = This location is learning 3 = Incomplete installation (of devices, people, etc.) 4 = System problem (offline devices, low battery, abnormal device behaviors, etc.) 5 = Subjective warning (abnormal trends, sleeping too much, etc.) 6 = Critical alert (falls, didn't wake up, water leak, etc.) :param category: Priority from 0 - 6 where 6 requires the most human attention :param rank: Rank from 100% good to 0% good to compare homes within a specific category :param comment: Human-understandabld description of why the location has this priority :return: """ body = { "location": { "priorityCategory": int(category), "priorityRank": int(rank), "priorityComment": comment, } } self._http_put( "/analytic/location/{}".format(self.get_location_id()), data=json.dumps(body), ) def get_spaces(self): """ Get a list of spaces for this location https://iotapps.docs.apiary.io/#reference/locations/location-spaces/get-spaces :return: List of spaces """ r = self._http_get( "/cloud/json/location/{}/spaces".format(self.get_location_id()) ) j = json.loads(r.text) _check_for_errors(j) if "spaces" in j: return j["spaces"] return [] def set_space(self, space_type, name, space_id=None): """ Add / Update a space https://iotapps.docs.apiary.io/#reference/locations/location-spaces/update-space :param space_type: Type of space :param name: Name of space :param space_id: Space ID to update an existing space definition """ params = {} if space_id is not None: params["spaceId"] = space_id body = {"space": {"type": space_type, "name": name}} self._http_post( "/cloud/json/location/{}/spaces".format(self.get_location_id()), params=params, data=json.dumps(body), ) def delete_space(self, space_id): """ Delete a space https://iotapps.docs.apiary.io/#reference/locations/location-spaces/delete-space :param space_id: Space ID to delete """ params = {"spaceId": space_id} r = self._http_delete( "/cloud/json/location/{}/spaces".format(self.get_location_id()), params=params, ) _check_for_errors(json.loads(r.text)) def add_occupancy(self, occupancy): """ Add occupancy https://iotapps.docs.apiary.io/#reference/locations/location-occupancy/add-occupancy :param occupancy: Bitmask mark that locations is occupied or vacant: 0 - none or no data 1 - managed 2 - measured """ r = self._http_post( "/cloud/json/location/{}/occupancy/{}".format( self.get_location_id(), occupancy ) ) _check_for_errors(json.loads(r.text)) def delete_occupancy(self, occupancy): """ Delete occupancy https://iotapps.docs.apiary.io/#reference/locations/location-occupancy/remove-occupancy :param occupancy: Bitmask mark that locations is occupied or vacant: 0 - none or no data 1 - managed 2 - measured """ r = self._http_delete( "/cloud/json/location/{}/occupancy/{}".format( self.get_location_id(), occupancy ) ) _check_for_errors(json.loads(r.text)) # =========================================================================== # Weather # =========================================================================== def get_weather_forecast_by_geocode( self, latitude, longitude, units=None, hours=12 ): """ Get the weather forecast by geocode (latitude, longitude) :param latitude: Latitude :param longitude: Longitude :param units: Default is Metric. 'e'=English; 'm'=Metric; 'h'=Hybrid (UK); 's'=Metric SI units (not available for all APIs) :param hours: Forecast depth in hours, default is 12. Available hours are 6, 12. 
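
        Example (illustrative coordinates for Seattle):

            forecast = botengine.get_weather_forecast_by_geocode(47.6062, -122.3321, units="m", hours=12)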
        :return: Weather JSON data
        """
        params = {}
        if units is not None:
            params["units"] = units

        if hours is not None:
            params["hours"] = hours

        r = self._http_get(
            "/cloud/json/weather/forecast/geocode/"
            + str(latitude)
            + "/"
            + str(longitude),
            params=params,
        )
        j = json.loads(r.text)
        _check_for_errors(j)
        return j

    def get_current_weather_by_geocode(self, latitude, longitude, units=None):
        """
        Get the current weather by geocode (latitude, longitude)

        :param latitude: Latitude
        :param longitude: Longitude
        :param units: Default is Metric. 'e'=English; 'm'=Metric; 'h'=Hybrid (UK); 's'=Metric SI units (not available for all APIs)
        :return: Weather JSON data
        """
        params = {}
        if units is not None:
            params["units"] = units

        r = self._http_get(
            "/cloud/json/weather/current/geocode/"
            + str(latitude)
            + "/"
            + str(longitude),
            params=params,
        )
        j = json.loads(r.text)
        _check_for_errors(j)
        return j

    def get_weather_forecast_by_location(self, location_id, units=None, hours=12):
        """
        Get the weather forecast by Location ID

        :param location_id: Location ID for which to retrieve the weather forecast
        :param units: Default is Metric. 'e'=English; 'm'=Metric; 'h'=Hybrid (UK); 's'=Metric SI units (not available for all APIs)
        :param hours: Forecast depth in hours, default is 12. Available hours are 6, 12.
        :return: Weather JSON data
        """
        params = {}
        if units is not None:
            params["units"] = units

        if hours is not None:
            params["hours"] = hours

        r = self._http_get(
            "/cloud/json/weather/forecast/location/" + str(location_id), params=params
        )
        j = json.loads(r.text)
        _check_for_errors(j)
        return j

    def get_current_weather_by_location(self, location_id, units=None):
        """
        Get the current weather by Location ID

        :param location_id: Location ID for which to retrieve the current weather
        :param units: Default is Metric. 'e'=English; 'm'=Metric; 'h'=Hybrid (UK); 's'=Metric SI units (not available for all APIs)
        :return: Weather JSON data
        """
        params = {}
        if units is not None:
            params["units"] = units

        r = self._http_get(
            "/cloud/json/weather/current/location/" + str(location_id), params=params
        )
        j = json.loads(r.text)
        _check_for_errors(j)
        return j

    # ===========================================================================
    # Timers
    # ===========================================================================
    def start_timer(self, seconds, function, argument=None, reference=None):
        """
        Start a timer with a relative time in seconds to fire.

        :param seconds: Number of seconds from now to execute.
        :param function: Function to execute when the timer fires. This must be a function, not a class method.
        :param argument: Optional argument to inject into the fired timer
        :param reference: Optional ID to reference this timer. Useful if you plan on canceling the timer later.
        """
        absolute_time = self.get_timestamp()
        self.set_timer(
            int(absolute_time + (seconds * 1000)), function, argument, reference
        )

    def start_timer_s(self, seconds, function, argument=None, reference=None):
        """
        Start a timer with a relative time in seconds to fire.

        :param seconds: Number of seconds from now to execute.
        :param function: Function to execute when the timer fires. This must be a function, not a class method.
        :param argument: Optional argument to inject into the fired timer
        :param reference: Optional ID to reference this timer. Useful if you plan on canceling the timer later.
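        Illustrative usage (assumes a hypothetical module-level function `timer_fired`;
        as noted above, the callback cannot be a class method):

            # Fire timer_fired() in 60 seconds; the reference lets us cancel it later
            botengine.start_timer_s(60, timer_fired, argument=None, reference="hydration_reminder")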
""" self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(">start_timer_s() reference={} ms={} now={} trigger={}".format(reference, seconds * 1000, self.get_timestamp(), int(self.get_timestamp() + (seconds * 1000)))) self.set_alarm( int(self.get_timestamp() + (seconds * 1000)), function, argument, reference ) def start_timer_ms(self, milliseconds, function, argument=None, reference=None): """ Start a timer with a relative time in milliseconds to fire. :param milliseconds: Number of milliseconds from now to execute. :param function: Function to execute when the timer fires. This must be a function, not be a class method. :param argument: Optional argument to inject into the fired timer :param reference: Optional ID to reference this timer. Useful if you plan on canceling the timer later. """ absolute_time = self.get_timestamp() self.get_logger(f"{'botengine'}.{__class__.__name__}").debug("|start_timer_ms() reference={} ms={} now={} trigger={}".format(reference, milliseconds, absolute_time, int(absolute_time + milliseconds))) self.set_alarm(int(absolute_time + milliseconds), function, argument, reference) def set_alarm(self, timestamp_ms, function, argument=None, reference=None): """ Set an alarm with an absolute timestamp :param timestamp_ms: Absolute unix epoch time in milliseconds to fire the timer. :param function: Function to execute when the timer fires. This must be a function, not be a class method. :param argument: Optional argument to inject into the fired timer :param reference: Optional ID to reference this timer. Useful if you plan on canceling the timer later. """ if timestamp_ms < self.get_timestamp() - 31536000000: # Set a timer for over a year ago. Did you accidentally set an absolute alarm and think it was a relative timer? import traceback self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "|set_alarm() A microservice attempted to set a timer/alarm over a year ago back in time. Did you accidentally set an absolute alarm and think it was a relative timer? Please check your logic. timestamp_ms={}; function={}; argument={}; reference={}; traceback={}".format( timestamp_ms, function, argument, reference, traceback.format_stack(), ) ) if ( not self.playback and self.get_trigger_type() & self.TRIGGER_DATA_REQUEST != 0 ): # Illegal operation - data request triggers execute concurrently with other bot executions and therefore your core variable cannot get updated. self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "|set_alarm() You cannot start a timer/alarm while executing a data request trigger. 
        saved_timers = self.load_variable(TIMERS_VARIABLE_NAME)
        if saved_timers is None:
            saved_timers = []

        # Timer tuple is:
        # (timestamp, function, argument, reference)
        saved_timers = [
            x for x in saved_timers if (x[3] != reference and x[0] != MAXINT)
        ]
        saved_timers.append((int(timestamp_ms), function, argument, reference))
        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "|set_alarm() reference={} ms={} now={} trigger={}".format(
                reference,
                timestamp_ms,
                self.get_timestamp(),
                int(self.get_timestamp() + (timestamp_ms - self.get_timestamp())),
            )
        )

        # Log when the latest change to our timer variable was made
        saved_timers = [x for x in saved_timers if x[0] != MAXINT]
        saved_timers.append((MAXINT, self.get_timestamp(), None, None))
        saved_timers.sort(key=lambda tup: tup[0])
        self.save_variable(TIMERS_VARIABLE_NAME, saved_timers)

        # The end of this bot execution will extract the next timer to execute and set it up

    def set_timer(self, timestamp, function, argument=None, reference=None):
        """
        Deprecated. Use set_alarm() instead.

        Set an alarm with an absolute timestamp.

        :param timestamp: Absolute unix epoch time to fire the timer.
        :param function: Function to execute when the timer fires. This must be a function, not a class method.
        :param argument: Optional argument to inject into the fired timer
        :param reference: Optional ID to reference this timer. Useful if you plan on canceling the timer later.
        """
        self.set_alarm(timestamp, function, argument, reference)

    def is_timer_running(self, reference):
        """
        Find out if at least one instance of a particular timer is running

        :param reference: Search for timers with the given reference. Cannot be None.
        :return: True if there is at least 1 existing timer with this reference running
        """
        saved_timers = self.load_variable(TIMERS_VARIABLE_NAME)
        if saved_timers is None:
            return False

        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "|is_timer_running() reference={} timers={}".format(reference, saved_timers)
        )
        for ref in [x[3] for x in saved_timers]:
            if ref == reference:
                return True

        return False

    def timer_timestamp_ms(self, reference):
        """
        Get the timer or alarm's timestamp in milliseconds

        :param reference: Reference of the timer or alarm to look up
        :return: Timestamp in milliseconds, or None if no timer with this reference exists
        """
        saved_timers = self.load_variable(TIMERS_VARIABLE_NAME)
        if saved_timers is None:
            return None

        for ref, ts in [(x[3], x[0]) for x in saved_timers]:
            if ref == reference:
                return ts

        return None

    def cancel_timers(self, reference):
        """
        Cancel ALL timers with the given reference.

        :param reference: Search for timers with the given reference and destroy them. Cannot be None.
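        Illustrative usage (reference value is hypothetical):

            # Stop every pending timer that was started with this reference
            botengine.cancel_timers("hydration_reminder")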
""" saved_timers = self.load_variable(TIMERS_VARIABLE_NAME) if saved_timers is None: saved_timers = [] saved_timers = [ x for x in saved_timers if (x[3] != reference and x[0] != MAXINT) ] saved_timers.append((MAXINT, self.get_timestamp(), None, None)) saved_timers.sort(key=lambda tup: tup[0]) self.save_variable(TIMERS_VARIABLE_NAME, saved_timers) if not self.cancelled_timers and len(saved_timers) <= 1: self._cancel_execution_request() self.cancelled_timers = True def _inspect_timer_stack(self): """ For running locally :return: """ self.get_logger(f"{'botengine'}.{__class__.__name__}").info( Color.PURPLE + "TIMER STACK: " + Color.END ) saved_timers = self.load_variable(TIMERS_VARIABLE_NAME) for t in saved_timers: self.get_logger(f"{'botengine'}.{__class__.__name__}").info( Color.PURPLE + "\t{}".format(t) + Color.END ) def _execute_again_in_n_seconds(self, seconds): """Execute this bot again at a relative time, N seconds from now, without an external trigger :param seconds """ self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Botengine: Executing again in {} seconds".format(seconds) ) params = {"in": int(seconds)} r = self._http_put("/analytic/execute", params=params) j = json.loads(r.text) _check_for_errors(j) def _execute_again_at_timestamp(self, unix_timestamp_ms): """Execute this bot again at an absolute time, at the given timestamp, without an external trigger :param unix_timestamp_ms: """ self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Botengine: Executing again at timestamp {} (delta {}ms)".format(unix_timestamp_ms, unix_timestamp_ms - self.get_timestamp()) ) params = {"at": int(unix_timestamp_ms)} r = self._http_put("/analytic/execute", params=params) j = json.loads(r.text) _check_for_errors(j) # Extract "timer" field, return as int if present, otherwise return 0 if timer was not set return int(j.get("timer", 0)) def _cancel_execution_request(self): """ Cancel any existing requests for delayed executions """ self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "Botengine: Cancelling execution request" ) self._http_delete("/analytic/execute") # =========================================================================== # Questions # =========================================================================== def generate_question( self, key_identifier, response_type, device_id=None, icon=None, icon_font=None, display_type=None, collection=None, editable=False, default_answer=None, correct_answer=None, answer_format=None, urgent=False, front_page=False, send_push=False, send_sms=False, send_email=False, ask_timestamp=None, section_id=0, question_weight=0, ): """ Initializer :param key_identifier: Your own custom key to recognize this question regardless of the language or framing of the question to the user. :param response_type: Type of response we should expect the user to give 1 = Boolean question 2 = Multichoice, Single select (requires response options) 4 = Multichoice, Multi select (requires response options) 6 = Day of the Week 7 = Slider (Default minimum is 0, default maximum is 100, default increment is 5) 8 = Time in seconds since midnight 9 = Datetime (xsd:dateTime format) 10 = Open-ended text :param device_id: Device ID to ask a question about so the UI can reference its name :param icon: Icon to display when asking this question :param icon_font: Icon font to render the icon. See the ICON_FONT_* descriptions in com.ppc.Bot/utilities/utilities.py :param display_type: How to render and display the question in the UI. 
        For example, a Boolean question can be an on/off switch, a yes/no question, or just a single button. See the documentation for more details.
        :param collection: Collection Name
        :param editable: True to make this question editable later. This makes the question more like a configuration for the bot that can be adjusted again and again, rather than a one-time question.
        :param default_answer: Default answer for the question
        :param correct_answer: This is a regular expression to determine if the user's answer is "correct" or not
        :param answer_format: Regular expression string that represents what a valid response would look like. All other responses would not be allowed.
        :param urgent: True if this question is urgent enough that it requires a push notification and should be elevated to the top of the stack after any currently delivered questions. Use sparingly to avoid end-user burnout.
        :param front_page: True if this question should be delivered to the front page of the mobile/web app, when the user is ready to consume another question from the system.
        :param send_push: True to send this question as a push notification. Use sparingly to avoid end-user burnout.
        :param send_sms: True to send an SMS message. Because this costs money, this is currently disabled.
        :param send_email: True to send the question in an email. Use sparingly to avoid burnout and emails being relegated to junk mail.
        :param ask_timestamp: Future timestamp to ask this question. If this is not set, the current time will be used.
        :param section_id: ID of a section, which acts as both the element to group by as well as the weight of the section vs. other sections in the UI. (default is 0)
        :param question_weight: Weight of an individual question within a grouped section. The lighter the weight, the more the question rises to the top of the list in the UI. (default is 0)
        """
        # This should mirror exactly what the Question object provides
        return Question(
            key_identifier,
            response_type,
            device_id,
            icon,
            icon_font,
            display_type,
            collection,
            editable,
            default_answer,
            correct_answer,
            answer_format,
            urgent,
            front_page,
            send_push,
            send_sms,
            send_email,
            ask_timestamp,
            section_id,
            question_weight,
        )

    def ask_question(self, question):
        """
        Ask your question

        :param question: Question to ask, created by the generate_question method
        """
        self.questions_to_ask[question.key_identifier] = question

    def delete_question(self, question):
        """
        Delete the question.

        Best practice is to learn what we need from a question, then delete the question when we're done with it. This can help free up space and speed up execution once we've learned what we need from the question.

        :param question: Question to delete
        """
        if question._question_id is None:
            # Then just don't ask the question
            if question.key_identifier in self.questions_to_ask:
                del self.questions_to_ask[question.key_identifier]

            saved_questions = self.load_variable(QUESTIONS_VARIABLE_NAME)
            if saved_questions is not None:
                if question.key_identifier in saved_questions:
                    # Delete the question from our saved questions
                    del saved_questions[question.key_identifier]
                    self.save_variable(QUESTIONS_VARIABLE_NAME, saved_questions)

        self.questions_to_delete[question.key_identifier] = question

    def flush_questions(self):
        """
        Synchronize all deleted and new questions with the server.

        This is called automatically when the bot is finished executing. It should never have to be called manually.
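        Typical question lifecycle for reference (illustrative; the key and arguments are hypothetical, and response_type 1 = Boolean):

            question = botengine.generate_question("slept_well", 1, front_page=True)
            botengine.ask_question(question)
            # ... the bot finishes executing and flush_questions() runs automatically ...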
""" if self.get_bot_type() == BotEngine.BOT_TYPE_ORGANIZATION_RAG: # Organization RAG bots cannot call APIs return saved_questions = self.load_variable(QUESTIONS_VARIABLE_NAME) if saved_questions is None: saved_questions = {} original_saved_questions = saved_questions.copy() # Delete questions for q_id in self.questions_to_delete: question = self.questions_to_delete[q_id] if self.playback: if question.key_identifier in saved_questions: # Delete the question from our saved questions del saved_questions[question.key_identifier] continue params = {"questionId": question._question_id} r = self._http_delete("/analytic/questions", params=params) j = json.loads(r.text) if j["resultCode"] == 0: if question.key_identifier in saved_questions: # Delete the question from our saved questions del saved_questions[question.key_identifier] # Ask questions if self.playback: for q_id in self.questions_to_ask: question = self.questions_to_ask[q_id] question.answer_status = BotEngine.ANSWER_STATUS_QUEUED saved_questions[question.key_identifier] = question self.save_variable(QUESTIONS_VARIABLE_NAME, saved_questions) self.questions_to_delete = {} self.questions_to_ask = {} return if len(self.questions_to_ask) > 0: body = {"questions": []} for q_id in self.questions_to_ask: question = self.questions_to_ask[q_id] json_question = question._form_json_question() # 'iconFont' field first appeared in server version 1.16, so delete it for previous versions. if not self.is_server_version_newer_than(1, 16): if "iconFont" in json_question: del json_question["iconFont"] body["questions"].append(json_question) self.get_logger(f"{'botengine'}.{__class__.__name__}").info( "|flush_questions() questions={}".format( json.dumps(body, sort_keys=True) ) ) r = self._http_post("/analytic/questions", data=json.dumps(body)) response = json.loads(r.text) self.get_logger( f"{'botengine'}.{__class__.__name__}" ).info( "|flush_questions() | response={}".format( json.dumps(response, sort_keys=True) ) ) if response["resultCode"] == 0 and "questions" in response: for response_block in response["questions"]: question = self.questions_to_ask[response_block["key"]] question._question_id = response_block["id"] question.answer_status = BotEngine.ANSWER_STATUS_QUEUED saved_questions[question.key_identifier] = question elif response["resultCode"] == 6: self.get_logger(f"{'botengine'}.{__class__.__name__}").error( "botengine: Cannot ask questions. {}".format( response["resultCodeMessage"] ) ) if original_saved_questions != saved_questions: # Only save changes if we actually made changes self.save_variable(QUESTIONS_VARIABLE_NAME, saved_questions) self.questions_to_delete = {} self.questions_to_ask = {} def get_asked_questions(self): """ Retrieve a dictionary of previously asked questions that still exist Include any pending questions that need to be saved. Exclude any pending questions that need to be deleted. { "question_id_1" : question_object_1, "question_id_2" : question_object_2 } :return: a dictionary of questions we've previously asked. The question's ID is the dictionary's key, the question itself is the value. """ saved_questions = self.load_variable(QUESTIONS_VARIABLE_NAME) if saved_questions is None: saved_questions = {} # Include any pending questions that need to be saved. for q_id in self.questions_to_ask: if q_id not in saved_questions: saved_questions[q_id] = self.questions_to_ask[q_id] # Exclude any pending questions that need to be deleted. 
        for q_id in self.questions_to_delete:
            if q_id in saved_questions:
                del saved_questions[q_id]

        return saved_questions

    def retrieve_question(self, key):
        """
        Retrieve a single previously asked question based on its key

        :param key: Key Identifier generated by the bot developer to track this question
        :return: A Question object if the question was asked and still exists, None if the question wasn't asked or no longer exists because it was deleted
        """
        saved_questions = self.get_asked_questions()
        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "|retrieve_question() key={}; saved_questions.keys()={}".format(
                key, saved_questions.keys()
            )
        )
        if key not in saved_questions:
            return None

        return saved_questions[key]

    def change_answer(self, question, new_answer):
        """
        Change the answer to a previously asked question.

        One of the best places to use this, for example, is with an editable question that is being used to configure the bot. Let's say you ask an editable question and the user answers it, configuring your bot; now your bot has to change behaviors again. You can update the user's answer to show the user what the bot is currently running off of, allowing the user to adjust the answer again. It becomes a bi-directional back-and-forth - "here is what the settings are going to be" - so the user and the bot can continually agree upon the configuration.

        :param question: Question to update the answer for
        :param new_answer: New answer to inject into the question back to the user
        """
        if question._question_id is None:
            _bot_loggers["botengine"].warn(
                "Cannot change answer to question: "
                + str(question.key_identifier)
                + " because it has never been asked. Set its default_answer instead, then ask it."
            )
            return

        body = {"answer": new_answer}

        params = {"questionId": question._question_id}

        r = self._http_put("/analytic/questions", params=params, data=json.dumps(body))
        j = json.loads(r.text)
        _check_for_errors(j)

        # Update our saved questions
        question.answer = new_answer

        saved_questions = self.load_variable(QUESTIONS_VARIABLE_NAME)
        if saved_questions is None:
            # If this ever happens, it is due to developer error: the question was not asked before its answer was modified.
            saved_questions = {}

        saved_questions[question.key_identifier] = question
        self.save_variable(QUESTIONS_VARIABLE_NAME, saved_questions)

    def resynchronize_questions(self):
        """
        Resynchronize our local cache of Questions with the server.
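        Illustrative usage - rebuild the local cache from the server's copy, then read it back:

            botengine.resynchronize_questions()
            questions = botengine.get_asked_questions()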
        """
        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            ">resynchronize_questions()"
        )
        r = self._http_get("/analytic/questions")
        j = json.loads(r.text)
        _check_for_errors(j)

        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "|resynchronize_questions() questions={}".format(
                json.dumps(j, sort_keys=True, indent=4)
            )
        )

        if "questions" in j:
            questions = j["questions"]
            saved_questions = {}
            for question in questions:
                self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
                    "|resynchronize_questions() | question={}".format(
                        json.dumps(question, sort_keys=True, indent=4)
                    )
                )
                q = Question(question["key"], question["responseType"])
                q._question_id = question["id"]

                if "deviceId" in question:
                    q.device_id = question["deviceId"]

                if "icon" in question:
                    q.icon = question["icon"]

                if "displayType" in question:
                    q.display_type = question["displayType"]

                if "editable" in question:
                    q.editable = question["editable"]

                if "defaultAnswer" in question:
                    q.default_answer = question["defaultAnswer"]

                if "answer" in question:
                    q.answer = question["answer"]

                if "creationDateMs" in question:
                    q.ask_timestamp = question["creationDateMs"]

                if "answerStatus" in question:
                    q.answer_status = question["answerStatus"]

                if "question" in question:
                    q.question = question["question"]

                if "answerDateMs" in question:
                    q.answer_time = question["answerDateMs"]

                if "questionWeight" in question:
                    q.question_weight = question["questionWeight"]

                if "sectionId" in question:
                    q.section_id = question["sectionId"]

                if "sectionTitle" in question:
                    q.section_title = question["sectionTitle"]

                if "answerModified" in question:
                    q.answer_modified = question["answerModified"]

                if "responseOptions" in question:
                    q.response_options = question["responseOptions"]

                if "placeholder" in question:
                    q.placeholder = question["placeholder"]

                if "slider" in question:
                    if "min" in question["slider"]:
                        q.slider_min = question["slider"]["min"]

                    if "max" in question["slider"]:
                        q.slider_max = question["slider"]["max"]

                    if "inc" in question["slider"]:
                        q.slider_inc = question["slider"]["inc"]

                if "answerFormat" in question:
                    q.answer_format = question["answerFormat"]

                if "collectionName" in question:
                    q.collection = question["collectionName"]

                if question["key"] in saved_questions:
                    self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
                        "|resynchronize_questions() | overwrite existing question={}".format(
                            json.dumps(
                                vars(saved_questions[question["key"]]),
                                sort_keys=True,
                                indent=4,
                            )
                        )
                    )
                else:
                    self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
                        "|resynchronize_questions() | new question={}".format(
                            json.dumps(vars(q), sort_keys=True, indent=4)
                        )
                    )

                saved_questions[question["key"]] = q

            self.save_variable(QUESTIONS_VARIABLE_NAME, saved_questions)

        self.get_logger(f"{'botengine'}.{__class__.__name__}").debug(
            "