{ "metadata": { "language": "Julia", "name": "", "signature": "sha256:0268061fda1c421f741f0884df8c4b09e9fe4f215aa0f622aaa67f6c166afe1b" }, "nbformat": 3, "nbformat_minor": 0, "worksheets": [ { "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Entropy Maximization\n", "\n", "Here is a constrained entropy maximization problem:\n", "\n", "\\begin{array}{ll}\n", " \\mbox{maximize} & -\\sum_{i=1}^n x_i \\log x_i \\\\\n", " \\mbox{subject to} & \\mathbf{1}' x = 1 \\\\\n", " & Ax \\leq b\n", "\\end{array}\n", "\n", "where $x \\in \\mathbf{R}^n$ is our optimization variable and $A \\in \\mathbf{R}^{m \\times n}, b \\in \\mathbf{R}^{m}$.\n", "\n", "To solve this, we can simply use the `entropy` operation Convex.jl provides." ] }, { "cell_type": "code", "collapsed": false, "input": [ "using Convex, SCS\n", "\n", "n = 25;\n", "m = 15;\n", "A = randn(m, n); \n", "b = rand(m, 1); \n", "\n", "x = Variable(n);\n", "problem = maximize(entropy(x), sum(x) == 1, A * x <= b)\n", "solve!(problem, SCSSolver(verbose=0))\n", "\n", "println(problem.optval)\n", "println(x.value)" ], "language": "python", "metadata": {}, "outputs": [ { "output_type": "stream", "stream": "stdout", "text": [ "3." ] }, { "output_type": "stream", "stream": "stdout", "text": [ "2188073846026657\n", "[" ] }, { "output_type": "stream", "stream": "stdout", "text": [ "0.039596694193772826\n", " 0.040060435043293534\n", " 0.04019878866073644\n", " 0.03983978331717988\n", " 0.03937174306719803\n", " 0.040288113034394835\n", " 0.04068296295587558\n", " 0.040348699741541545\n", " 0.03977943218842446\n", " 0.03920155057856007\n", " 0.03967456574632815\n", " 0.03965070447384114\n", " 0.04005546388236403\n", " 0.040085272144984585\n", " 0.04063891088176249\n", " 0.040633416079841944\n", " 0.04053676010233853\n", " 0.0401124273142282\n", " 0.04065132469849484\n", " 0.03909258717640152\n", " 0.04005131432373106\n", " 0.03929182732897887\n", " 0.04009644317279356\n", " 0.04046763019265253\n", " 0.03959301166868323]\n" ] } ], "prompt_number": 6 } ], "metadata": {} } ] }