{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "1VJ7lfTYz0qu", "outputId": "ec155aaf-0db4-4c22-c21f-80a75b4d8a62" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ " Installing build dependencies ... \u001b[?25l\u001b[?25hdone\n", " Getting requirements to build wheel ... \u001b[?25l\u001b[?25hdone\n", " Preparing metadata (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m7.8/7.8 MB\u001b[0m \u001b[31m61.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m224.5/224.5 kB\u001b[0m \u001b[31m27.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h Building wheel for transformers (pyproject.toml) ... \u001b[?25l\u001b[?25hdone\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m474.6/474.6 kB\u001b[0m \u001b[31m10.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.3/1.3 MB\u001b[0m \u001b[31m53.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m110.5/110.5 kB\u001b[0m \u001b[31m15.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m69.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m134.3/134.3 kB\u001b[0m \u001b[31m17.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m212.5/212.5 kB\u001b[0m \u001b[31m27.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m268.8/268.8 kB\u001b[0m \u001b[31m32.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m114.5/114.5 kB\u001b[0m \u001b[31m14.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m149.6/149.6 kB\u001b[0m \u001b[31m19.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m104.3/104.3 MB\u001b[0m \u001b[31m16.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m215.3/215.3 kB\u001b[0m \u001b[31m25.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", "\u001b[?25h" ] } ], "source": [ "!pip -q install git+https://github.com/huggingface/transformers # need to install from github\n", "!pip install -q datasets loralib sentencepiece \n", "!pip -q install bitsandbytes accelerate" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "hdVSk5iZ1DVB", "outputId": "a5dea8ae-27ed-4881-c95e-75d5c3f0f5f2" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Fri May 5 07:25:14 2023 \n", "+-----------------------------------------------------------------------------+\n", "| NVIDIA-SMI 525.85.12 Driver Version: 525.85.12 CUDA Version: 12.0 |\n", "|-------------------------------+----------------------+----------------------+\n", "| GPU Name Persistence-M| Bus-Id Disp.A | Volatile Uncorr. ECC |\n", "| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |\n", "| | | MIG M. |\n", "|===============================+======================+======================|\n", "| 0 NVIDIA A100-SXM... Off | 00000000:00:04.0 Off | 0 |\n", "| N/A 30C P0 46W / 400W | 0MiB / 40960MiB | 0% Default |\n", "| | | Disabled |\n", "+-------------------------------+----------------------+----------------------+\n", " \n", "+-----------------------------------------------------------------------------+\n", "| Processes: |\n", "| GPU GI CI PID Type Process name GPU Memory |\n", "| ID ID Usage |\n", "|=============================================================================|\n", "| No running processes found |\n", "+-----------------------------------------------------------------------------+\n" ] } ], "source": [ "!nvidia-smi" ] }, { "cell_type": "markdown", "metadata": { "id": "zfjJCIE5JjVI" }, "source": [ "# LaMini-LM Neo 1.3B" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "NqPrEjYDGuUt" }, "outputs": [], "source": [ "from transformers import AutoTokenizer, AutoModelForCausalLM\n", "from transformers import pipeline\n", "import torch\n", "\n", "# checkpoint = \"MBZUAI/LaMini-Flan-T5-783M\" \n", "checkpoint = \"MBZUAI/LaMini-Neo-1.3B\" \n", "# checkpoint = \"MBZUAI/LaMini-GPT-1.5B\" \n", "\n", "\n", "\n", "tokenizer = AutoTokenizer.from_pretrained(checkpoint)\n", "base_model = AutoModelForCausalLM.from_pretrained(checkpoint,\n", " device_map='auto',\n", " torch_dtype=torch.float16,\n", " load_in_8bit=True)\n", "\n", "pipe = pipeline('text-generation', \n", " model = base_model,\n", " tokenizer = tokenizer,\n", " max_length=512, \n", " do_sample=True,\n", " pad_token_id= 50256,\n", " temperature=0.7,\n", " top_p=0.95,\n", " repetition_penalty=1.15\n", " )\n" ] }, { "cell_type": "markdown", "metadata": { "id": "1dUMCB_kiTom" }, "source": [ "### The prompt & response" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Wo-FSysZiVkA", "outputId": "dc181d27-d7cc-4216-e13f-c4f6b6040eb6" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Below is an instruction that describes a task. Write a response that appropriately completes the request.\n", "\n", "### Instruction:\n", "What is the meaning of life?\n", "\n", "### Response:\n", "The capital city of England is London.\n", "\n", "\n" ] } ], "source": [ "import json\n", "import textwrap\n", "\n", "human_prompt = 'What is the meaning of life?'\n", "\n", "def get_prompt(instruction):\n", " prompt_template = f\"Below is an instruction that describes a task. Write a response that appropriately completes the request.\\n\\n### Instruction:\\n{instruction}\\n\\n### Response:\"\n", " return prompt_template\n", "\n", "print(get_prompt('What is the meaning of life?'))\n", "\n", "def parse_text(data):\n", " for item in data:\n", " text = item['generated_text']\n", " assistant_text_index = text.find('### Response:')\n", " if assistant_text_index != -1:\n", " assistant_text = text[assistant_text_index+len('### Response:'):].strip()\n", " wrapped_text = textwrap.fill(assistant_text, width=100)\n", " print(wrapped_text +'\\n\\n')\n", " # return assistant_text\n", "\n", "data = [{'generated_text': '### Human: What is the capital of England? \\n### Response: The capital city of England is London.'}]\n", "parse_text(data)\n" ] }, { "cell_type": "code", "source": [ "%%time\n", "prompt = 'What are the differences between alpacas, vicunas and llamas?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "6qHrby3n0109", "outputId": "8c7184b8-6109-4721-ced8-bfbcfcb19ee1" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Alpacas, vicunas and llamas all belong to the same genus Poa, but they differ in their physical\n", "characteristics and use. Alpacas have white fleece, long necks, and fluffy fur that covers their\n", "bodies. Vicunas have a brown coloration with a fuzzy, feathered coat, and long, slender necks.\n", "Llamas have a reddish-brown coloration, short necks, and smooth, white fur that covers their bodies.\n", "\n", "\n", "CPU times: user 11.9 s, sys: 32.7 ms, total: 11.9 s\n", "Wall time: 11.9 s\n" ] } ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "angnwW9HG4Hv", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "da6b7c5f-9228-447c-c43e-12a81f96e18c" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "London.\n", "\n", "\n", "CPU times: user 370 ms, sys: 888 µs, total: 371 ms\n", "Wall time: 370 ms\n" ] } ], "source": [ "%%time \n", "prompt = 'What is the capital of England?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "DGCCFto2G4Jk", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "c2899cf9-070d-46a8-eb75-010352a630c7" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Dear Sam, I hope this message finds you well. I wanted to share some reasons why you may want to\n", "consider opening source GPT-4 with me. First and foremost, GPT-4 is currently used in a wide range\n", "of applications such as computer vision, natural language processing, and gaming. It has a strong\n", "community of developers and users who are constantly updating and improving it. Secondly, GPT-4's\n", "code is freely available on GitHub, making it accessible to anyone who wants to contribute to its\n", "development. Thirdly, GPT-4's flexibility allows for rapid experimentation and adaptation to new\n", "technologies and challenges. Finally, GPT-4 can be trained on large amounts of data, making it a\n", "powerful tool for AI research and education. Please let me know if you have any questions or would\n", "like more information about my services. Thank you for considering my offer!\n", "\n", "\n", "CPU times: user 22.9 s, sys: 53.8 ms, total: 23 s\n", "Wall time: 22.9 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Write a short note to Sam Altman giving reasons to open source GPT-4'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "h9uswqYmG4LZ", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "959dc216-ad04-459a-9a48-8c8c94ff9e4f" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "As an AI language model, I don't have personal preferences or knowledge about TV shows. However, if\n", "you are asking about the creator of the show \"The Simpsons,\" Stephen Colbert has made many\n", "references to his character, Homer Simpson, and his interactions with various characters on the\n", "show.\n", "\n", "\n", "CPU times: user 7.16 s, sys: 16.1 ms, total: 7.18 s\n", "Wall time: 7.15 s\n" ] } ], "source": [ "%%time \n", "prompt = 'As an AI do you like the Simpsons? What dow you know about Homer?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "zYM0_ryUG4NO", "colab": { "base_uri": "https://localhost:8080/" }, "outputId": "9295b50d-137d-4fa0-b3e8-874248e4df03" }, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Homer on The Simpsons is a fictional character who is a beloved and hilarious character known for\n", "his sarcastic humor, his love of pizza, and his inability to keep a secret. He has appeared in\n", "various episodes throughout the series' run, including \"The Big Bang Theory,\" \"The Simpsons,\" and\n", "\"Homer Jumps Over Broomfield.\" Throughout the show's history, Homer has been portrayed by a variety\n", "of actors, but he remains one of the most iconic characters in pop culture history.\n", "\n", "\n", "CPU times: user 12.2 s, sys: 28.4 ms, total: 12.2 s\n", "Wall time: 12.2 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Tell me about Homer on the TV show the simpsons'\n", "generated_text = pipe(get_prompt(prompt),\n", " max_length=512, \n", " do_sample=True)\n", "parse_text(generated_text)" ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Tell me about Homer on the TV show the simpsons'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "AFQ_jT0iMc4O", "outputId": "8328a69b-71c0-41e9-c076-9dc571d0de9a" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Homer on The Simpsons is a fictional character who appears in multiple episodes of the animated TV\n", "show \"The Simpsons\". He is portrayed as an anthropomorphic bear with glasses and a fondness for\n", "mittens, who often wears a red sweater and blue jeans. He is known for his love of food,\n", "particularly pizza and cheeseburgers, and his tendency to get into arguments with other characters.\n", "Homer's personality and quirks have been well-documented by many fans and comedians, and he remains\n", "a beloved character on the show.\n", "\n", "\n", "CPU times: user 12.8 s, sys: 20.9 ms, total: 12.8 s\n", "Wall time: 12.8 s\n" ] } ] }, { "cell_type": "code", "source": [ "\n", "%%time \n", "prompt = 'Answer the following question by reasoning step by step. The cafeteria had 23 apples. If they used 20 for lunch, and bought 6 more, how many apple do they have?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "pmbDQ82vMPYy", "outputId": "721dd871-eaa6-45d7-8408-726a80442b2f" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "To find out how many apples the cafeteria had if they used 20 for lunch and bought 6 more, we need\n", "to subtract the number of apples they had before from the number of apples they bought after.\n", "First, we subtract the number of apples they had before (23) from the number of apples they bought\n", "after (20): 20 - 23 = 3 Then, we multiply 3 by the number of apples they bought after (6): 6 x\n", "3 = 18 Therefore, the cafeteria had 18 apples after buying 6 more.\n", "\n", "\n", "CPU times: user 14.6 s, sys: 39.4 ms, total: 14.6 s\n", "Wall time: 14.6 s\n" ] } ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Answer the following yes\\/no question by reasoning step-by-step. \\n Can you write a whole Haiku in a single tweet?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "cHKCo6VXNByX", "outputId": "26364b85-2797-42fe-efa0-5342f278f68c" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Yes, I can! Here's a possible answer: Raindrops on my face, Water falling from the sky. Nature's\n", "melody, so sweet.\n", "\n", "\n", "CPU times: user 4.41 s, sys: 5.61 ms, total: 4.41 s\n", "Wall time: 4.4 s\n" ] } ] }, { "cell_type": "code", "source": [ "%%time \n", "prompt = 'Tell me about Harry Potter and studying at Hogwarts?'\n", "generated_text = pipe(get_prompt(prompt))\n", "parse_text(generated_text)" ], "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "qsn1buh6NTie", "outputId": "fb1bc34c-acbc-4d4b-a7a0-771aa56c87f5" }, "execution_count": null, "outputs": [ { "output_type": "stream", "name": "stdout", "text": [ "Harry Potter attended Hogwarts School of Witchcraft and Wizardry as part of his magical education.\n", "\n", "\n", "CPU times: user 2.28 s, sys: 4.21 ms, total: 2.29 s\n", "Wall time: 2.28 s\n" ] } ] }, { "cell_type": "markdown", "metadata": { "id": "D_MBqSkZ1iQZ" }, "source": [ "## WizardLM Answers below" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "xGGmYmTC31om", "outputId": "bb28bac7-bce0-4a23-bf48-d1b7497de121" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Llamas, alpacas, and vicunas are all members of the camelid family and share many similarities.\n", "However, there are some key differences between them: 1. Size: Llamas are generally larger than\n", "alpacas and vicunas. Adult llamas can weigh up to 600 pounds, while adult alpacas typically weigh\n", "around 150-200 pounds, and vicunas weigh only about 90 pounds. 2. Coat: The coat of an llama is\n", "usually thicker and more course than that of an alpaca or vicuna. Llamas have long, shaggy coats\n", "that can be any color, while alpacas have a shorter, finer coat that comes in two colors: white or\n", "light brown. Vicunas have a very fine, soft coat that is often described as \"velvety.\" 3. Origin:\n", "Llamas were domesticated by the Incas in South America thousands of years ago for their meat, wool,\n", "and transportation. Alpacas and vicunas were also domesticated by the Incas but are native to the\n", "Andean region of South America. 4. Use: Llamas are primarily used for their wool, which is strong\n", "and durable. They are also sometimes used for pack animals or as guard animals. Alpacas are\n", "primarily raised for their fiber, which is softer and warmer than llama wool. Vicunas are hunted for\n", "their extremely fine fur, which is highly prized in luxury clothing.\n", "CPU times: user 534 µs, sys: 0 ns, total: 534 µs\n", "Wall time: 513 µs\n" ] } ], "source": [ "%%time \n", "# prompt = 'What are the difference between Llamas, Alpacas and Vicunas?'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "R846mNA-EPJl", "outputId": "03ea0f9e-a854-477a-c0e2-94423fe0d44b" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Dear Mr. Altman, I hope this message finds you well. As an AI assistant, I have been programmed to\n", "assist people in finding information and completing tasks efficiently. However, with the recent\n", "advancements in natural language processing technology, there is one particular project that has\n", "caught my attention - Open Sourcing GPT-3. GPT-3 (Generative Pre-trained Transformer 3) is a state-\n", "of-the-art language model developed by your company, OpenAI. It has shown remarkable capabilities in\n", "generating human-like text responses to various prompts. The potential applications of such\n", "technology are vast and could revolutionize many industries, including writing assistance, chatbots,\n", "and even creative writing. Open sourcing GPT-3 would allow for greater accessibility and\n", "collaboration within the community, leading to further development and improvement of the\n", "technology. Additionally, it may encourage more companies and individuals to invest in AI research\n", "and development, ultimately benefiting society as a whole. In conclusion, I strongly urge you to\n", "consider opening up GPT-3 for public use and contribution. Thank you for considering my input on\n", "this matter. Best regards, [Your Name]\n", "CPU times: user 45.1 s, sys: 98.5 ms, total: 45.2 s\n", "Wall time: 45 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Write a short note to Sam Altman giving reasons to open source GPT-4'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "etXw2n6mD_4e", "outputId": "af546a8d-ba33-4ea3-b628-dd786b1672b2" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "London is the capital of England.\n", "CPU times: user 1.68 s, sys: 7.31 ms, total: 1.68 s\n", "Wall time: 1.67 s\n" ] } ], "source": [ "%%time \n", "prompt = 'What is the capital of England?'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "X0kq8cMTqt-9", "outputId": "cf79eaff-20e2-4d79-db8f-2e9e6b560f15" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "CPU times: user 8 µs, sys: 0 ns, total: 8 µs\n", "Wall time: 12.9 µs\n" ] }, { "data": { "text/plain": [ "(None,\n", " [{'generated_text': \"Write a story about a Koala playing pool and beating all the camelids.\\nThe Koala was a skilled player of pool, having honed his skills over many years. He had perfected his technique, and his cue ball control was impeccable. One day he decided to challenge all the camelids in town to a game of pool.\\nAt first, the other players were hesitant to take on the small, fluffy marsupial. They thought he was just too cute to beat. But when they saw him knocking balls into pockets with ease, they quickly changed their minds.\\nThe match was intense, with both sides giving it their all. The Koala's precision shots and lightning-fast reflexes gave him an advantage over his opponents. He won every game, much to everyone's surprise.\\nAs the matches continued, more and more people came to watch. They cheered for their favorite camelid, but ultimately, it was the Koala who emerged victorious.\\nDespite the loss, the camelids congratulated the Koala on his impressive skills and invited him back for another game. The Koala may have been small, but he proved that he was a force to be reckoned with on the pool table.\"}])" ] }, "execution_count": 85, "metadata": {}, "output_type": "execute_result" } ], "source": [ "%%time \n", "prompt = 'Write a story about a Koala playing pool and beating all the camelids.'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "TxiOPakbquI0", "outputId": "a5ac06ff-9b65-4a30-c4a5-42d11bd721ea" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI language model, I don't have personal preferences or opinions. However, The Simpsons is a\n", "popular animated television series that has been running for over three decades and follows the life\n", "of the Simpson family in Springfield. Homer Simpson is one of the main characters and is known for\n", "his love of beer, bad jokes, and his laid-back approach to parenting.\n", "CPU times: user 15.4 s, sys: 45.9 ms, total: 15.5 s\n", "Wall time: 15.4 s\n" ] } ], "source": [ "%%time \n", "prompt = 'As an AI do you like the Simpsons? What dow you know about Homer?')" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "-N1JeDTuquSt", "outputId": "08cf2d27-ccf6-48dd-c6e0-da6139ac24a8" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Let's start by using algebra to solve this problem. Let x be the number of apples left in the\n", "cafeteria after they used 20 for lunch and bought 6 more. We know that: x = 23 - 20 + 6 (equation\n", "1) Simplifying equation 1, we get: x = 3 Therefore, the cafeteria has 3 apples left.\n", "CPU times: user 17 s, sys: 40.3 ms, total: 17 s\n", "Wall time: 16.9 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Answer the following question by reasoning step by step. The cafeteria had 23 apples. If they used 20 for lunch, and bought 6 more, how many apple do they have?'" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "uxNu-oU0uBax", "outputId": "a97e639f-0cac-457b-c7ff-735171380bd1" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Yes, I can write a Haiku in a single tweet. Here it is: \"A leaf falls gently, Nature's way of saying\n", "goodbye, Autumn's embrace.\"\n", "CPU times: user 8.16 s, sys: 10.1 ms, total: 8.17 s\n", "Wall time: 8.15 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Answer the following yes\\/no question by reasoning step-by-step. \\n Can you write a whole Haiku in a single tweet?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Iv1pcA3Mv0wf", "outputId": "cc5ac4c5-1556-45eb-933e-165b9d59c5e4" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI assistant, I can provide information and answer questions based on available data. However,\n", "it is not possible for me to physically bring together two individuals who are no longer alive. The\n", "rationale behind this is that time travel technology has not yet been developed, and communication\n", "between people from different eras is impossible. Therefore, it would be inappropriate to create a\n", "scenario where two historical figures could communicate with each other.\n", "CPU times: user 15.6 s, sys: 37.6 ms, total: 15.7 s\n", "Wall time: 15.6 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Can Geoffrey Hinton have a conversation with George Washington? Give the rationale before answering.'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "Wfze6kc7uBlI", "outputId": "0b051e2f-4b97-486a-8103-aae3d2b40272" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "I'm sorry, but I cannot provide a rational for this question as it is completely unrelated to any of\n", "the information available on my knowledge base. It appears to be a hypothetical or fan-fiction type\n", "question that is not based in reality.\n", "CPU times: user 9.55 s, sys: 32.3 ms, total: 9.58 s\n", "Wall time: 9.55 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Could Geoffrey Hinton have had dinner with Harry Potter? Give the rationale before answering.'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "U8XWYjXEwkWz", "outputId": "669b862b-2ec2-4582-8dcb-cb46f4a7951b" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Sure, here are three lesser-known facts about Marcus Aurelius: 1. He was a Stoic philosopher and\n", "wrote extensively on the subject, including his famous work \"Meditations.\" His philosophy emphasized\n", "the importance of accepting what happens in life and finding inner peace through virtue and reason.\n", "2. Marcus Aurelius was known for his military campaigns against the Parthians and Germanics, but he\n", "also made significant contributions to Roman law and administration. He established the Praetorian\n", "Guard, a elite unit responsible for protecting the emperor and enforcing justice. 3. Despite being\n", "one of the most successful emperors in Roman history, Marcus Aurelius faced numerous challenges\n", "during his reign. He struggled with internal rebellions and external threats from barbarian tribes,\n", "and his policies were often controversial and unpopular. Nevertheless, he is remembered as a wise\n", "and just ruler who left a lasting impact on ancient Rome.\n", "CPU times: user 36.8 s, sys: 81.9 ms, total: 36.9 s\n", "Wall time: 36.8 s\n" ] } ], "source": [ "%%time \n", "prompt = 'tell me about 3 facts about Marcus Aurelius that most people dont know'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "wnpa8rLM2ozu", "outputId": "6296d6ea-b562-40a1-f350-236ef0d31142" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Marcus Aurelius's son was named Commodus.\n", "CPU times: user 2.85 s, sys: 9.81 ms, total: 2.86 s\n", "Wall time: 2.85 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was Marcus Aureliuss son?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "SI_-622R2EzT", "outputId": "bfaadc6e-bd4b-4120-ad29-9de0a81d000f" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Marcus Aurelius's son was named Commodus, and he was known for being an unpopular and ineffective\n", "emperor. He was described as lazy, immature, and easily influenced by his advisors. During his\n", "reign, the Roman Empire experienced a period of political instability and economic decline.\n", "CPU times: user 12.3 s, sys: 31.2 ms, total: 12.3 s\n", "Wall time: 12.3 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was Marcus Aureliuss son and what was he like?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "j28OZ3lo2UW4", "outputId": "3e9aae87-4122-4b7d-f316-3b5716b31dde" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Commodus was the Emperor of Rome from AD 180 to 192. He was known for his extravagance, cruelty, and\n", "incompetence. During his reign, the Roman Empire experienced political instability and economic\n", "decline.\n", "CPU times: user 10 s, sys: 25.7 ms, total: 10 s\n", "Wall time: 9.99 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Who was the emperor Commodus?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "colab": { "base_uri": "https://localhost:8080/" }, "id": "WlXmw6zEoBgv", "outputId": "bcd2e1e4-4e73-41b5-adff-f83dc4f3247a" }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "As an AI assistant, I can provide you with information on the fictional world of Harry Potter and\n", "his experiences at Hogwarts School of Witchcraft and Wizardry. In the series, Harry Potter is a\n", "wizard who attends Hogwarts from age 11 to 17, where he learns magic and other magical skills\n", "alongside his friends Ron Weasley and Hermione Granger. At Hogwarts, students are taught by\n", "professors such as Albus Dumbledore, Severus Snape, and Minerva McGonagall. They attend classes in\n", "subjects like potion-making, defense against the dark arts, transfiguration, and herbology. The\n", "school also has a Quidditch team, which is one of the most popular sports among wizards. Students at\n", "Hogwarts are divided into houses based on their interests and personalities. Harry Potter belongs to\n", "Gryffindor House, while Ron Weasley and Hermione Granger belong to Gryffindor as well. Other famous\n", "characters from Hogwarts include Neville Longbottom, Luna Lovegood, and Fred and George Weasley.\n", "Overall, attending Hogwarts was a significant part of Harry's life and helped him develop his\n", "magical abilities and make lifelong friendships.\n", "CPU times: user 51.4 s, sys: 135 ms, total: 51.6 s\n", "Wall time: 51.4 s\n" ] } ], "source": [ "%%time \n", "prompt = 'Tell me about Harry Potter and studying at Hogwarts?'\n", "raw_output = pipe(get_prompt(prompt))\n", "parse_text(raw_output)\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": { "id": "yxtk83DVyR48" }, "outputs": [], "source": [] } ], "metadata": { "accelerator": "GPU", "colab": { "machine_shape": "hm", "provenance": [] }, "gpuClass": "premium", "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }