{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "1VJ7lfTYz0qu", "outputId": "ee6a682e-d0a2-4048-dd6e-d4bbfc49e3e2" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ " Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m224.5/224.5 kB\u001b[0m \u001b[31m3.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.8/7.8 MB\u001b[0m \u001b[31m27.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Building wheel for transformers (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m474.6/474.6 kB\u001b[0m \u001b[31m10.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m54.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m212.5/212.5 kB\u001b[0m \u001b[31m27.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m110.5/110.5 kB\u001b[0m \u001b[31m14.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m76.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.3/134.3 kB\u001b[0m \u001b[31m19.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.5/114.5 kB\u001b[0m \u001b[31m15.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m268.8/268.8 kB\u001b[0m \u001b[31m31.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m149.6/149.6 kB\u001b[0m \u001b[31m13.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m104.3/104.3 MB\u001b[0m \u001b[31m8.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m215.3/215.3 kB\u001b[0m \u001b[31m26.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m108.2/108.2 MB\u001b[0m \u001b[31m16.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h" ] } ], "source": [ "!pip -q install git+https://github.com/huggingface/transformers # need to install from github\n", "!pip install -q datasets loralib sentencepiece \n", "!pip -q install bitsandbytes accelerate xformers" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "hdVSk5iZ1DVB", "outputId": "8eee7419-caf6-4b4d-c812-59808b83c500" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Fri May 5 09:34:04 2023 \n", "+-----------------------------------------------------------------------------+\n", "| NVIDIA-SMI 525.85.12 
Driver Version: 525.85.12 CUDA Version: 12.0 |\n", "|-------------------------------+----------------------+----------------------+\n", "| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", "| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", "| | | MIG M. |\n", "|===============================+======================+======================|\n", "| 0 NVIDIA A100-SXM... Off | 00000000:00:04.0 Off | 0 |\n", "| N/A 31C P0 42W / 400W | 0MiB / 40960MiB | 0% Default |\n", "| | | Disabled |\n", "+-------------------------------+----------------------+----------------------+\n", " \n", "+-----------------------------------------------------------------------------+\n", "| Processes: |\n", "| GPU GI CI PID Type Process name GPU Memory |\n", "| ID ID Usage |\n", "|=============================================================================|\n", "| No running processes found |\n", "+-----------------------------------------------------------------------------+\n" ] } ], "source": [ "!nvidia-smi" ] }, { "cell_type": "markdown", "metadata": { "id": "zfjJCIE5JjVI" }, "source": [ "# LaMini-Flan-T5-783M" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "NqPrEjYDGuUt" }, "outputs": [], "source": [ "from transformers import AutoTokenizer, AutoModelForSeq2SeqLM\n", "from transformers import pipeline\n", "import torch\n", "\n", "checkpoint = \"MBZUAI/LaMini-Flan-T5-783M\" \n", "# checkpoint = \"MBZUAI/LaMini-Neo-1.3B\" \n", "# checkpoint = \"MBZUAI/LaMini-GPT-1.5B\" \n", "\n", "\n", "\n", "tokenizer = AutoTokenizer.from_pretrained(checkpoint)\n", "base_model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint,\n", " device_map='auto',\n", " torch_dtype=torch.float16,\n", " load_in_8bit=True)\n", "\n", "pipe = pipeline('text2text-generation', \n", " model = base_model,\n", " tokenizer = tokenizer,\n", " max_length=512, \n", " do_sample=True,\n", " temperature=0.7,\n", " top_p=0.95,\n", " repetition_penalty=1.15\n", " )\n" ] }, { "cell_type": "markdown", "metadata": { "id": "1dUMCB_kiTom" }, "source": [ "### The prompt & response" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Wo-FSysZiVkA", "outputId": "830d7be2-1c63-4b16-881e-1e91ca44a8a4" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n", "\n", "### Instruction:\n", "What is the meaning of life?\n", "\n", "### Response:\n", "What is the capital of England? ### Response: The capital city of England is London.\n", "\n", "\n" ] } ], "source": [ "import json\n", "import textwrap\n", "\n", "human_prompt = 'What is the meaning of life?'\n", "\n", "def get_prompt(instruction):\n", " prompt_template = f\"Below is an instruction that describes a task. Write a response that appropriately completes the request.\\n\\n### Instruction:\\n{instruction}\\n\\n### Response:\"\n", " return prompt_template\n", "\n", "print(get_prompt('What is the meaning of life?'))\n", "\n", "def parse_text(data):\n", " for item in data:\n", " text = item['generated_text']\n", " wrapped_text = textwrap.fill(text, width=100)\n", " print(wrapped_text +'\\n\\n')\n", " # return assistant_text\n", "\n", "data = [{'generated_text': 'What is the capital of England? 
\\n### Response: The capital city of England is London.'}]\n", "parse_text(data)\n" ] }, { "cell_type": "code", "source": [ "%%time\n", "prompt = 'What are the differences between alpacas, vicunas and llamas?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "6qHrby3n0109", "outputId": "e453fdf2-dfa8-485f-c5e4-a5d211d58a3a" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Alpacas, vicunas, and llamas are three different breeds of llamas. The main differences between them\n", "include their physical appearance, habitat, temperament, and food preferences.\n", "\n", "\n", "CPU times: user 6.77 s, sys: 10.2 ms, total: 6.78 s\n", "Wall time: 6.76 s\n" ] } ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "angnwW9HG4Hv", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "c04d827b-76ed-45ce-8c1a-933926394132" }, "outputs": [ { "output_type": "stream", "name": "stderr", "text": [ "/usr/local/lib/python3.10/dist-packages/transformers/pipelines/base.py:1080: UserWarning: You seem to be using the pipelines sequentially on GPU. In order to maximize efficiency please use a dataset\n", " warnings.warn(\n" ] }, { "output_type": "stream", "name": "stdout", "text": [ "The capital of England is London.\n", "\n", "\n", "CPU times: user 1.27 s, sys: 0 ns, total: 1.27 s\n", "Wall time: 1.27 s\n" ] } ], "source": [ "%%time \n", "prompt = 'What is the capital of England?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "DGCCFto2G4Jk", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "8e9e88f6-46e5-4b5b-cff8-eb866577fffb" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "To open source GPT-4, I believe it is important to provide access to the code and make it available\n", "for anyone who wants to use it. This would allow users to collaborate with developers and contribute\n", "to the development of a better tool. Additionally, by making the code publicly accessible, we can\n", "encourage collaboration among developers and create a more collaborative and equitable environment\n", "for all stakeholders.\n", "\n", "\n", "CPU times: user 11.3 s, sys: 28.2 ms, total: 11.3 s\n", "Wall time: 11.3 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Write a short note to Sam Altman giving reasons to open source GPT-4'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "h9uswqYmG4LZ", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "0ab055a0-788d-441a-a1fd-6babcded1e1c" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "As an AI, I do not have the ability to like or dislike things. However, I am knowledgeable about\n", "Homer.\n", "\n", "\n", "CPU times: user 3.85 s, sys: 1.77 ms, total: 3.85 s\n", "Wall time: 3.84 s\n" ] } ], "source": [ "%%time \n", "prompt = 'As an AI do you like the Simpsons? 
What dow you know about Homer?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "zYM0_ryUG4NO", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "7f6c1ee7-29be-4c9f-9e11-3e4336012418" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Homer is a character on the TV show The Simpsons who is known for his intelligence, humor, and\n", "willingness to help those in need.\n", "\n", "\n", "CPU times: user 4.56 s, sys: 728 µs, total: 4.56 s\n", "Wall time: 4.54 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Tell me about Homer on the TV show the simpsons'\n", "generated_text = pipe(get_prompt(prompt),\n", " max_length=512, \n", " do_sample=True)\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Tell me about Homer on the TV show the simpsons'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "AFQ_jT0iMc4O", "outputId": "99e2ab8c-47d3-4ba6-c53d-0b2bb9d80668" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Homer is a character on the TV show The Simpsons.\n", "\n", "\n", "CPU times: user 2.29 s, sys: 3.46 ms, total: 2.3 s\n", "Wall time: 2.29 s\n" ] } ] }, { "cell_type": "code", "source": [ "\n", "%%time \n", "prompt = 'Answer the following question by reasoning step by step. The cafeteria had 23 apples. If they used 20 for lunch, and bought 6 more, how many apple do they have?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "pmbDQ82vMPYy", "outputId": "64e99c43-0d37-4a5c-f41a-98f2a85db89c" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Let x be the number of apples they have after buying 6 more. Then we can simplify: 23 - 20 = 11.\n", "Therefore, they have 11 apples left after using the last 2 for lunch and buying 6 more to meet their\n", "total of 13 apples.\n", "\n", "\n", "CPU times: user 7.85 s, sys: 15.4 ms, total: 7.87 s\n", "Wall time: 7.84 s\n" ] } ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Answer the following yes\\/no question by reasoning step-by-step. \\n Can you write a whole Haiku in a single tweet?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "cHKCo6VXNByX", "outputId": "21092af5-0925-4f7f-ae48-038664189bc7" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Yes, I can. Haikus are a form of Japanese poetry consisting of three lines with a 5-7-5 syllable\n", "pattern. 
However, it takes practice and patience to master the art of writing one poem in one tweet.\n", "\n", "\n", "CPU times: user 7.64 s, sys: 11.4 ms, total: 7.65 s\n", "Wall time: 7.63 s\n" ] } ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Tell me about Harry Potter and studying at Hogwarts?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "qsn1buh6NTie", "outputId": "87449cb0-e520-48c2-faef-38a6eb29baa0" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Harry Potter is a fictional character created by J.K. Rowling, and he attends Hogwarts School of\n", "Witchcraft and Wizardry as part of his magical adventures.\n", "\n", "\n", "CPU times: user 6.04 s, sys: 9.1 ms, total: 6.05 s\n", "Wall time: 6.03 s\n" ] } ] }, { "cell_type": "markdown", "metadata": { "id": "D_MBqSkZ1iQZ" }, "source": [ "## WizardLM Answers below" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "xGGmYmTC31om", "outputId": "bb28bac7-bce0-4a23-bf48-d1b7497de121" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Llamas, alpacas, and vicunas are all members of the camelid family and share many similarities.\n", "However, there are some key differences between them: 1. Size: Llamas are generally larger than\n", "alpacas and vicunas. Adult llamas can weigh up to 600 pounds, while adult alpacas typically weigh\n", "around 150-200 pounds, and vicunas weigh only about 90 pounds. 2. Coat: The coat of an llama is\n", "usually thicker and more course than that of an alpaca or vicuna. Llamas have long, shaggy coats\n", "that can be any color, while alpacas have a shorter, finer coat that comes in two colors: white or\n", "light brown. Vicunas have a very fine, soft coat that is often described as \"velvety.\" 3. Origin:\n", "Llamas were domesticated by the Incas in South America thousands of years ago for their meat, wool,\n", "and transportation. Alpacas and vicunas were also domesticated by the Incas but are native to the\n", "Andean region of South America. 4. Use: Llamas are primarily used for their wool, which is strong\n", "and durable. They are also sometimes used for pack animals or as guard animals. Alpacas are\n", "primarily raised for their fiber, which is softer and warmer than llama wool. Vicunas are hunted for\n", "their extremely fine fur, which is highly prized in luxury clothing.\n", "CPU times: user 534 µs, sys: 0 ns, total: 534 µs\n", "Wall time: 513 µs\n" ] } ], "source": [ "%%time \n", "# prompt = 'What are the difference between Llamas, Alpacas and Vicunas?'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "R846mNA-EPJl", "outputId": "03ea0f9e-a854-477a-c0e2-94423fe0d44b" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Dear Mr. Altman, I hope this message finds you well. As an AI assistant, I have been programmed to\n", "assist people in finding information and completing tasks efficiently. However, with the recent\n", "advancements in natural language processing technology, there is one particular project that has\n", "caught my attention - Open Sourcing GPT-3. GPT-3 (Generative Pre-trained Transformer 3) is a state-\n", "of-the-art language model developed by your company, OpenAI. 
It has shown remarkable capabilities in\n", "generating human-like text responses to various prompts. The potential applications of such\n", "technology are vast and could revolutionize many industries, including writing assistance, chatbots,\n", "and even creative writing. Open sourcing GPT-3 would allow for greater accessibility and\n", "collaboration within the community, leading to further development and improvement of the\n", "technology. Additionally, it may encourage more companies and individuals to invest in AI research\n", "and development, ultimately benefiting society as a whole. In conclusion, I strongly urge you to\n", "consider opening up GPT-3 for public use and contribution. Thank you for considering my input on\n", "this matter. Best regards, [Your Name]\n", "CPU times: user 45.1 s, sys: 98.5 ms, total: 45.2 s\n", "Wall time: 45 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Write a short note to Sam Altman giving reasons to open source GPT-4'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "etXw2n6mD_4e", "outputId": "af546a8d-ba33-4ea3-b628-dd786b1672b2" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "London is the capital of England.\n", "CPU times: user 1.68 s, sys: 7.31 ms, total: 1.68 s\n", "Wall time: 1.67 s\n" ] } ], "source": [ "%%time \n", "prompt = 'What is the capital of England?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "X0kq8cMTqt-9", "outputId": "cf79eaff-20e2-4d79-db8f-2e9e6b560f15" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "CPU times: user 8 µs, sys: 0 ns, total: 8 µs\n", "Wall time: 12.9 µs\n" ] }, { "data": { "text/plain": [ "(None,\n", " [{'generated_text': \"Write a story about a Koala playing pool and beating all the camelids.\\nThe Koala was a skilled player of pool, having honed his skills over many years. He had perfected his technique, and his cue ball control was impeccable. One day he decided to challenge all the camelids in town to a game of pool.\\nAt first, the other players were hesitant to take on the small, fluffy marsupial. They thought he was just too cute to beat. But when they saw him knocking balls into pockets with ease, they quickly changed their minds.\\nThe match was intense, with both sides giving it their all. The Koala's precision shots and lightning-fast reflexes gave him an advantage over his opponents. He won every game, much to everyone's surprise.\\nAs the matches continued, more and more people came to watch. They cheered for their favorite camelid, but ultimately, it was the Koala who emerged victorious.\\nDespite the loss, the camelids congratulated the Koala on his impressive skills and invited him back for another game. The Koala may have been small, but he proved that he was a force to be reckoned with on the pool table.\"}])" ] }, "execution_count": 85, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%%time \n", "prompt = 'Write a story about a Koala playing pool and beating all the camelids.'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "TxiOPakbquI0", "outputId": "a5ac06ff-9b65-4a30-c4a5-42d11bd721ea" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI language model, I don't have personal preferences or opinions. 
However, The Simpsons is a\n", "popular animated television series that has been running for over three decades and follows the life\n", "of the Simpson family in Springfield. Homer Simpson is one of the main characters and is known for\n", "his love of beer, bad jokes, and his laid-back approach to parenting.\n", "CPU times: user 15.4 s, sys: 45.9 ms, total: 15.5 s\n", "Wall time: 15.4 s\n" ] } ], "source": [ "%%time \n", "prompt = 'As an AI do you like the Simpsons? What dow you know about Homer?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "-N1JeDTuquSt", "outputId": "08cf2d27-ccf6-48dd-c6e0-da6139ac24a8" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Let's start by using algebra to solve this problem. Let x be the number of apples left in the\n", "cafeteria after they used 20 for lunch and bought 6 more. We know that: x = 23 - 20 + 6 (equation\n", "1) Simplifying equation 1, we get: x = 3 Therefore, the cafeteria has 3 apples left.\n", "CPU times: user 17 s, sys: 40.3 ms, total: 17 s\n", "Wall time: 16.9 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Answer the following question by reasoning step by step. The cafeteria had 23 apples. If they used 20 for lunch, and bought 6 more, how many apple do they have?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "uxNu-oU0uBax", "outputId": "a97e639f-0cac-457b-c7ff-735171380bd1" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Yes, I can write a Haiku in a single tweet. Here it is: \"A leaf falls gently, Nature's way of saying\n", "goodbye, Autumn's embrace.\"\n", "CPU times: user 8.16 s, sys: 10.1 ms, total: 8.17 s\n", "Wall time: 8.15 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Answer the following yes\\/no question by reasoning step-by-step. \\n Can you write a whole Haiku in a single tweet?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Iv1pcA3Mv0wf", "outputId": "cc5ac4c5-1556-45eb-933e-165b9d59c5e4" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI assistant, I can provide information and answer questions based on available data. However,\n", "it is not possible for me to physically bring together two individuals who are no longer alive. The\n", "rationale behind this is that time travel technology has not yet been developed, and communication\n", "between people from different eras is impossible. Therefore, it would be inappropriate to create a\n", "scenario where two historical figures could communicate with each other.\n", "CPU times: user 15.6 s, sys: 37.6 ms, total: 15.7 s\n", "Wall time: 15.6 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Can Geoffrey Hinton have a conversation with George Washington? Give the rationale before answering.'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Wfze6kc7uBlI", "outputId": "0b051e2f-4b97-486a-8103-aae3d2b40272" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "I'm sorry, but I cannot provide a rational for this question as it is completely unrelated to any of\n", "the information available on my knowledge base. 
It appears to be a hypothetical or fan-fiction type\n", "question that is not based in reality.\n", "CPU times: user 9.55 s, sys: 32.3 ms, total: 9.58 s\n", "Wall time: 9.55 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Could Geoffrey Hinton have had dinner with Harry Potter? Give the rationale before answering.'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "U8XWYjXEwkWz", "outputId": "669b862b-2ec2-4582-8dcb-cb46f4a7951b" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Sure, here are three lesser-known facts about Marcus Aurelius: 1. He was a Stoic philosopher and\n", "wrote extensively on the subject, including his famous work \"Meditations.\" His philosophy emphasized\n", "the importance of accepting what happens in life and finding inner peace through virtue and reason.\n", "2. Marcus Aurelius was known for his military campaigns against the Parthians and Germanics, but he\n", "also made significant contributions to Roman law and administration. He established the Praetorian\n", "Guard, a elite unit responsible for protecting the emperor and enforcing justice. 3. Despite being\n", "one of the most successful emperors in Roman history, Marcus Aurelius faced numerous challenges\n", "during his reign. He struggled with internal rebellions and external threats from barbarian tribes,\n", "and his policies were often controversial and unpopular. Nevertheless, he is remembered as a wise\n", "and just ruler who left a lasting impact on ancient Rome.\n", "CPU times: user 36.8 s, sys: 81.9 ms, total: 36.9 s\n", "Wall time: 36.8 s\n" ] } ], "source": [ "%%time \n", "prompt = 'tell me about 3 facts about Marcus Aurelius that most people dont know'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "wnpa8rLM2ozu", "outputId": "6296d6ea-b562-40a1-f350-236ef0d31142" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Marcus Aurelius's son was named Commodus.\n", "CPU times: user 2.85 s, sys: 9.81 ms, total: 2.86 s\n", "Wall time: 2.85 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was Marcus Aureliuss son?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "SI_-622R2EzT", "outputId": "bfaadc6e-bd4b-4120-ad29-9de0a81d000f" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Marcus Aurelius's son was named Commodus, and he was known for being an unpopular and ineffective\n", "emperor. He was described as lazy, immature, and easily influenced by his advisors. 
During his\n", "reign, the Roman Empire experienced a period of political instability and economic decline.\n", "CPU times: user 12.3 s, sys: 31.2 ms, total: 12.3 s\n", "Wall time: 12.3 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was Marcus Aureliuss son and what was he like?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "j28OZ3lo2UW4", "outputId": "3e9aae87-4122-4b7d-f316-3b5716b31dde" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Commodus was the Emperor of Rome from AD 180 to 192. He was known for his extravagance, cruelty, and\n", "incompetence. During his reign, the Roman Empire experienced political instability and economic\n", "decline.\n", "CPU times: user 10 s, sys: 25.7 ms, total: 10 s\n", "Wall time: 9.99 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was the emperor Commodus?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "WlXmw6zEoBgv", "outputId": "bcd2e1e4-4e73-41b5-adff-f83dc4f3247a" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI assistant, I can provide you with information on the fictional world of Harry Potter and\n", "his experiences at Hogwarts School of Witchcraft and Wizardry. In the series, Harry Potter is a\n", "wizard who attends Hogwarts from age 11 to 17, where he learns magic and other magical skills\n", "alongside his friends Ron Weasley and Hermione Granger. At Hogwarts, students are taught by\n", "professors such as Albus Dumbledore, Severus Snape, and Minerva McGonagall. They attend classes in\n", "subjects like potion-making, defense against the dark arts, transfiguration, and herbology. The\n", "school also has a Quidditch team, which is one of the most popular sports among wizards. Students at\n", "Hogwarts are divided into houses based on their interests and personalities. Harry Potter belongs to\n", "Gryffindor House, while Ron Weasley and Hermione Granger belong to Gryffindor as well. Other famous\n", "characters from Hogwarts include Neville Longbottom, Luna Lovegood, and Fred and George Weasley.\n", "Overall, attending Hogwarts was a significant part of Harry's life and helped him develop his\n", "magical abilities and make lifelong friendships.\n", "CPU times: user 51.4 s, sys: 135 ms, total: 51.6 s\n", "Wall time: 51.4 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Tell me about Harry Potter and studying at Hogwarts?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "yxtk83DVyR48" }, "outputs": [], "source": [] } ], "metadata": { "accelerator": "GPU", "colab": { "machine_shape": "hm", "provenance": [] }, "gpuClass": "premium", "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }