diff --git a/Auto_Making/API_8_3_Financial_Model_Prompt.ipynb b/Auto_Making/API_8_3_Financial_Model_Prompt.ipynb new file mode 100644 index 000000000..6803eea76 --- /dev/null +++ b/Auto_Making/API_8_3_Financial_Model_Prompt.ipynb @@ -0,0 +1,292 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "code", + "source": [], + "metadata": { + "id": "Khj3UP-O_6aQ" + }, + "execution_count": 1, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "!pip install openai pdfkit python-docx" + ], + "metadata": { + "id": "eo0OhdMT_7SQ", + "outputId": "4988ccb9-3327-4076-ac66-ebd4b522084f", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting openai\n", + " Using cached openai-1.3.8-py3-none-any.whl (221 kB)\n", + "Collecting pdfkit\n", + " Downloading pdfkit-1.0.0-py3-none-any.whl (12 kB)\n", + "Collecting python-docx\n", + " Downloading python_docx-1.1.0-py3-none-any.whl (239 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m239.6/239.6 kB\u001b[0m \u001b[31m2.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: anyio<5,>=3.5.0 in /usr/local/lib/python3.10/dist-packages (from openai) (3.7.1)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /usr/lib/python3/dist-packages (from openai) (1.7.0)\n", + "Collecting httpx<1,>=0.23.0 (from openai)\n", + " Downloading httpx-0.25.2-py3-none-any.whl (74 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.0/75.0 kB\u001b[0m \u001b[31m1.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.10/dist-packages (from openai) (1.10.13)\n", + "Requirement already satisfied: sniffio in /usr/local/lib/python3.10/dist-packages (from openai) (1.3.0)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.10/dist-packages (from openai) (4.66.1)\n", + "Requirement already satisfied: typing-extensions<5,>=4.5 in /usr/local/lib/python3.10/dist-packages (from openai) (4.5.0)\n", + "Requirement already satisfied: lxml>=3.1.0 in /usr/local/lib/python3.10/dist-packages (from python-docx) (4.9.3)\n", + "Requirement already satisfied: idna>=2.8 in /usr/local/lib/python3.10/dist-packages (from anyio<5,>=3.5.0->openai) (3.6)\n", + "Requirement already satisfied: exceptiongroup in /usr/local/lib/python3.10/dist-packages (from anyio<5,>=3.5.0->openai) (1.2.0)\n", + "Requirement already satisfied: certifi in /usr/local/lib/python3.10/dist-packages (from httpx<1,>=0.23.0->openai) (2023.11.17)\n", + "Collecting httpcore==1.* (from httpx<1,>=0.23.0->openai)\n", + " Downloading httpcore-1.0.2-py3-none-any.whl (76 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m76.9/76.9 kB\u001b[0m \u001b[31m8.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting h11<0.15,>=0.13 (from httpcore==1.*->httpx<1,>=0.23.0->openai)\n", + " Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m6.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hInstalling collected packages: pdfkit, python-docx, h11, httpcore, httpx, openai\n", + 
"\u001b[31mERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "llmx 0.0.15a0 requires cohere, which is not installed.\n", + "llmx 0.0.15a0 requires tiktoken, which is not installed.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed h11-0.14.0 httpcore-1.0.2 httpx-0.25.2 openai-1.3.8 pdfkit-1.0.0 python-docx-1.1.0\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + " # @title (Please insert your request to be done by this code at below form:👇👇)\n", + "TOPIC = \"PersonalityInteractions\" # @param {type:\"string\"}\n", + "PARAGRAPH = \"Use game theory to understand how different personality types (Dark Triad, Dark Tetrad, Dark Empathy) might interact within a society \\u003CINFO> PowerPoint\" # @param {type:\"string\"}\n", + "role = \"startup Entrepreneur\"# @param {type:\"string\"}\n", + "\n", + "\n", + "Your_Email = \"hh@gmail.com\" # @param {type:\"string\"}\n", + "\n", + "openai_api = \"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\" # @param {type:\"string\"}\n", + "\n", + "\n", + "#!export OPENAI_API_KEY = openai_api\n", + "\n", + "import os\n", + "\n", + "os.environ['OPENAI_API_KEY'] = openai_api #'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'" + ], + "metadata": { + "id": "vRIB557vAbP0" + }, + "execution_count": 8, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "!pip install python-dotenv" + ], + "metadata": { + "id": "3BF-3djRAkVu", + "outputId": "8a4f229b-ae56-4156-bf32-05bafa9c6e83", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 4, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting python-dotenv\n", + " Downloading python_dotenv-1.0.0-py3-none-any.whl (19 kB)\n", + "Installing collected packages: python-dotenv\n", + "Successfully installed python-dotenv-1.0.0\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "'''\n", + "This file contains the ChatGP class that generates a business plan using OpenAI's GPT model.\n", + "'''\n", + "import openai\n", + "import pdfkit\n", + "from docx import Document\n", + "from dotenv import load_dotenv\n", + "import os\n", + "class ChatGP:\n", + " def __init__(self):\n", + " load_dotenv()\n", + " self.api_key = os.getenv('OPENAI_API_KEY')\n", + " def generate_business_plan(self, topic, description):\n", + " prompts = [\n", + " \"1. Executive Summary:\",\n", + " \"2. Company Description:\",\n", + " \"3. Market Analysis:\",\n", + " \"4. Organization and Management:\",\n", + " \"5. Product or Service Line:\",\n", + " \"6. Marketing and Sales Strategy:\",\n", + " \"7. Funding Request:\",\n", + " \"8. Financial Projections:\",\n", + " \"9. Appendix:\",\n", + " \"10. Conclusion:\"\n", + " ]\n", + " results = []\n", + " for i, prompt in enumerate(prompts):\n", + " response = self.generate_response(prompt, topic, description)\n", + " result = f\"{i+1}. 
{prompt}\\n{response}\"\n", + " results.append(result)\n", + " self.save_business_plan(results)\n", + " return results\n", + " def generate_response(self, prompt, topic, description):\n", + " openai.api_key = self.api_key\n", + " response = openai.Completion.create(\n", + " engine='text-davinci-003',\n", + " prompt=f\"{prompt} {topic} {description}\",\n", + " max_tokens=100\n", + " )\n", + " return response.choices[0].text.strip()\n", + " def save_business_plan(self, results):\n", + " doc = Document()\n", + " for result in results:\n", + " title, content = result.split('\\n', 1)\n", + " subtitle, numbering = title.split('. ', 1)\n", + " doc.add_heading(subtitle, level=1)\n", + " doc.add_heading(numbering, level=2)\n", + " doc.add_paragraph(content)\n", + " doc.save('business_plan.docx')\n", + " pdfkit.from_file('business_plan.docx', 'business_plan.pdf')" + ], + "metadata": { + "id": "DlE3kHlfAL0n" + }, + "execution_count": 5, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "topic = TOPIC # request.form['topic']\n", + "description = PARAGRAPH # request.form['description']\n", + "chatgp = ChatGP()\n", + "results = chatgp.generate_business_plan(topic, description)" + ], + "metadata": { + "id": "_bQqVCZnEDWD", + "outputId": "5e21176c-f20b-432a-e38c-e891d2bca747", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + } + }, + "execution_count": 9, + "outputs": [ + { + "output_type": "error", + "ename": "APIRemovedInV1", + "evalue": "ignored", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAPIRemovedInV1\u001b[0m Traceback (most recent call last)", + "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mdescription\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mPARAGRAPH\u001b[0m \u001b[0;31m# request.form['description']\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mchatgp\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mChatGP\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mchatgp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgenerate_business_plan\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtopic\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdescription\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", + "\u001b[0;32m\u001b[0m in \u001b[0;36mgenerate_business_plan\u001b[0;34m(self, topic, description)\u001b[0m\n\u001b[1;32m 26\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 27\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprompt\u001b[0m \u001b[0;32min\u001b[0m \u001b[0menumerate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprompts\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 28\u001b[0;31m \u001b[0mresponse\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mgenerate_response\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mprompt\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtopic\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdescription\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 29\u001b[0m \u001b[0mresult\u001b[0m 
\u001b[0;34m=\u001b[0m \u001b[0;34mf\"{i+1}. {prompt}\\n{response}\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[0mresults\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mappend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresult\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m\u001b[0m in \u001b[0;36mgenerate_response\u001b[0;34m(self, prompt, topic, description)\u001b[0m\n\u001b[1;32m 33\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mgenerate_response\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mprompt\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtopic\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdescription\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 34\u001b[0m \u001b[0mopenai\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mapi_key\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mapi_key\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 35\u001b[0;31m response = openai.Completion.create(\n\u001b[0m\u001b[1;32m 36\u001b[0m \u001b[0mengine\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m'text-davinci-003'\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[0mprompt\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34mf\"{prompt} {topic} {description}\"\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;32m/usr/local/lib/python3.10/dist-packages/openai/lib/_old_api.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *_args, **_kwargs)\u001b[0m\n\u001b[1;32m 37\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 38\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__call__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0m_args\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0m_kwargs\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m->\u001b[0m \u001b[0mAny\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 39\u001b[0;31m \u001b[0;32mraise\u001b[0m \u001b[0mAPIRemovedInV1\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msymbol\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_symbol\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 40\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 41\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0;31mAPIRemovedInV1\u001b[0m: \n\nYou tried to access openai.Completion, but this is no longer supported in openai>=1.0.0 - see the README at https://github.com/openai/openai-python for the API.\n\nYou can run `openai migrate` to automatically upgrade your codebase to use the 1.0.0 interface. \n\nAlternatively, you can pin your installation to the old version, e.g. 
`pip install openai==0.28`\n\nA detailed migration guide is available here: https://github.com/openai/openai-python/discussions/742\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "'''\n", + "This file contains the main Flask application for the ChatGP business plan generator.\n", + "'''\n", + "from flask import Flask, render_template, request, redirect, url_for\n", + "#from chatgp import ChatGP\n", + "\n", + "\n", + "app = Flask(__name__)\n", + "@app.route('/')\n", + "def index():\n", + " return render_template('index.html')\n", + "@app.route('/generate', methods=['POST'])\n", + "def generate():\n", + " topic = request.form['topic']\n", + " description = request.form['description']\n", + " chatgp = ChatGP()\n", + " results = chatgp.generate_business_plan(topic, description)\n", + " return render_template('results.html', results=results)\n", + "@app.route('/send_email', methods=['POST'])\n", + "def send_email():\n", + " email = request.form['email']\n", + " # Add code to send the generated business plan to the specified email address\n", + " return redirect(url_for('index'))\n", + "if __name__ == '__main__':\n", + " app.run()" + ], + "metadata": { + "id": "t80qXWMOANAH", + "outputId": "aa8dba64-8517-4e8a-be29-65b12a1b2587", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + " * Serving Flask app '__main__'\n", + " * Debug mode: off\n" + ] + }, + { + "output_type": "stream", + "name": "stderr", + "text": [ + "INFO:werkzeug:\u001b[31m\u001b[1mWARNING: This is a development server. Do not use it in a production deployment. Use a production WSGI server instead.\u001b[0m\n", + " * Running on http://127.0.0.1:5000\n", + "INFO:werkzeug:\u001b[33mPress CTRL+C to quit\u001b[0m\n" + ] + } + ] + } + ], + "metadata": { + "colab": { + "provenance": [], + "include_colab_link": true + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "accelerator": "TPU" + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/DockerFile b/DockerFile new file mode 100644 index 000000000..0782fde56 --- /dev/null +++ b/DockerFile @@ -0,0 +1,27 @@ +# Start with a Python 3.9 base image +FROM python:3.9-slim + +# Set the working directory in the container +WORKDIR /app + +# Copy the current directory contents into the container at /app +COPY . 
/app + +# Install necessary libraries for GUI support +RUN apt-get update && apt-get install -y python3-tk x11-apps + +# Install the project dependencies +RUN pip install --no-cache-dir -r requirements.txt + +# Install HuggingFace Transformers and Uvicorn server +RUN pip install transformers uvicorn + +# Set the environment variable for OpenAI API key +# (you'll need to provide the actual key when running the container) +ENV OPENAI_API_KEY=your_OpenAI_API_key + +# Expose the port for Uvicorn server +EXPOSE 7860 + +# Command to run the Uvicorn server with your FastAPI application +CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "7860"] diff --git a/Dockerfile b/Dockerfile_main similarity index 96% rename from Dockerfile rename to Dockerfile_main index 3a9df3c6d..447570ab9 100644 --- a/Dockerfile +++ b/Dockerfile_main @@ -21,4 +21,4 @@ ENV OPENAI_API_KEY=your_OpenAI_API_key EXPOSE 8000 # Set an entry point that runs a shell for interactive mode -ENTRYPOINT ["/bin/bash"] \ No newline at end of file +ENTRYPOINT ["/bin/bash"] diff --git a/NoteBook/ChatDev_1.ipynb b/NoteBook/ChatDev_1.ipynb new file mode 100644 index 000000000..8218d5e94 --- /dev/null +++ b/NoteBook/ChatDev_1.ipynb @@ -0,0 +1,535 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "source": [ + "#Communicative Agents for Software Development\n", + "\n", + "https://github.com/OpenBMB/ChatDev" + ], + "metadata": { + "id": "PDvXCooLMtst" + }, + "id": "PDvXCooLMtst" + }, + { + "cell_type": "markdown", + "source": [ + "![enter image description here](https://i.stack.imgur.com/nORyw.jpg)" + ], + "metadata": { + "id": "GBrp6O0KM42b" + }, + "id": "GBrp6O0KM42b" + }, + { + "cell_type": "code", + "source": [ + "\n", + "from google.colab import drive\n", + "import os\n", + " # Mount Google Drive\n", + "if not os.path.isdir('/content/drive'):\n", + " # If not, mount the drive\n", + " drive.mount('/content/drive')\n", + "else:\n", + " print(\"Drive is already mounted.\")\n", + "\n", + "dst = '/content/drive/MyDrive/ChatGPT_Paper_wrting/ChatDev_projec'\n", + "if not os.path.exists(dst):\n", + " os.makedirs(dst)\n", + "\n", + "os.chdir(dst)\n", + "print ( 'current directory is :', os. 
getcwd())" + ], + "metadata": { + "id": "LypJ6L9RwaM2", + "outputId": "24f4a5a0-4fae-4621-b046-71e04b4b1514", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "id": "LypJ6L9RwaM2", + "execution_count": 14, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Drive is already mounted.\n", + "current directory is : /content/drive/MyDrive/ChatGPT_Paper_wrting/ChatDev_projec\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!git clone https://github.com/OpenBMB/ChatDev.git" + ], + "metadata": { + "id": "vJYsSBgtMt6-", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "334266c8-18b1-4534-cbf1-2e71a972e031" + }, + "id": "vJYsSBgtMt6-", + "execution_count": 15, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "fatal: destination path 'ChatDev' already exists and is not an empty directory.\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!sudo apt-get install conda" + ], + "metadata": { + "id": "2v7dsRxSNjKQ", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "1016abd4-1585-4ceb-c062-a2ab68e0bd2e" + }, + "id": "2v7dsRxSNjKQ", + "execution_count": 16, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Reading package lists... Done\n", + "Building dependency tree... Done\n", + "Reading state information... Done\n", + "E: Unable to locate package conda\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!conda create -n ChatDev_conda_env python=3.9 -y\n", + "!conda activate ChatDev_conda_env" + ], + "metadata": { + "id": "IDhB_ZN7NdoM", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "b4646bcb-0e54-4c8c-da7c-1564b9274432" + }, + "id": "IDhB_ZN7NdoM", + "execution_count": 17, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/bin/bash: line 1: conda: command not found\n", + "/bin/bash: line 1: conda: command not found\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "%cd ChatDev\n", + "!pip3 install -r requirements.txt" + ], + "metadata": { + "id": "LLd6MBw7NsaT", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "0edd72a7-e640-441b-b15a-befaf0a4a33e" + }, + "id": "LLd6MBw7NsaT", + "execution_count": 18, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "/content/drive/MyDrive/ChatGPT_Paper_wrting/ChatDev_projec/ChatDev\n", + "Collecting colorama==0.4.6 (from -r requirements.txt (line 1))\n", + " Downloading colorama-0.4.6-py2.py3-none-any.whl (25 kB)\n", + "Collecting Flask==2.3.2 (from -r requirements.txt (line 2))\n", + " Downloading Flask-2.3.2-py3-none-any.whl (96 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m96.9/96.9 kB\u001b[0m \u001b[31m3.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting Flask-SocketIO==5.3.4 (from -r requirements.txt (line 3))\n", + " Downloading Flask_SocketIO-5.3.4-py3-none-any.whl (17 kB)\n", + "Collecting importlib-metadata==6.8.0 (from -r requirements.txt (line 4))\n", + " Downloading importlib_metadata-6.8.0-py3-none-any.whl (22 kB)\n", + "Collecting numpy==1.24.3 (from -r requirements.txt (line 5))\n", + " Downloading numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.3 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m21.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting 
openai==1.3.3 (from -r requirements.txt (line 6))\n", + " Downloading openai-1.3.3-py3-none-any.whl (220 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m220.3/220.3 kB\u001b[0m \u001b[31m18.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: regex==2023.6.3 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 7)) (2023.6.3)\n", + "Requirement already satisfied: requests==2.31.0 in /usr/local/lib/python3.10/dist-packages (from -r requirements.txt (line 8)) (2.31.0)\n", + "Collecting tenacity==8.2.2 (from -r requirements.txt (line 9))\n", + " Downloading tenacity-8.2.2-py3-none-any.whl (24 kB)\n", + "Collecting tiktoken==0.4.0 (from -r requirements.txt (line 10))\n", + " Downloading tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m45.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting virtualenv==20.23.0 (from -r requirements.txt (line 11))\n", + " Downloading virtualenv-20.23.0-py3-none-any.whl (3.3 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.3/3.3 MB\u001b[0m \u001b[31m50.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting Werkzeug==2.3.6 (from -r requirements.txt (line 12))\n", + " Downloading Werkzeug-2.3.6-py3-none-any.whl (242 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m242.5/242.5 kB\u001b[0m \u001b[31m22.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting Markdown==3.4.4 (from -r requirements.txt (line 13))\n", + " Downloading Markdown-3.4.4-py3-none-any.whl (94 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m94.2/94.2 kB\u001b[0m \u001b[31m7.3 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting Pillow==10.1.0 (from -r requirements.txt (line 14))\n", + " Downloading Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl (3.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m3.6/3.6 MB\u001b[0m \u001b[31m41.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: Jinja2>=3.1.2 in /usr/local/lib/python3.10/dist-packages (from Flask==2.3.2->-r requirements.txt (line 2)) (3.1.2)\n", + "Requirement already satisfied: itsdangerous>=2.1.2 in /usr/local/lib/python3.10/dist-packages (from Flask==2.3.2->-r requirements.txt (line 2)) (2.1.2)\n", + "Requirement already satisfied: click>=8.1.3 in /usr/local/lib/python3.10/dist-packages (from Flask==2.3.2->-r requirements.txt (line 2)) (8.1.7)\n", + "Collecting blinker>=1.6.2 (from Flask==2.3.2->-r requirements.txt (line 2))\n", + " Downloading blinker-1.7.0-py3-none-any.whl (13 kB)\n", + "Collecting python-socketio>=5.0.2 (from Flask-SocketIO==5.3.4->-r requirements.txt (line 3))\n", + " Downloading python_socketio-5.10.0-py3-none-any.whl (74 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m74.4/74.4 kB\u001b[0m \u001b[31m9.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.10/dist-packages (from importlib-metadata==6.8.0->-r requirements.txt (line 4)) (3.17.0)\n", + "Requirement already satisfied: anyio<4,>=3.5.0 in /usr/local/lib/python3.10/dist-packages (from 
openai==1.3.3->-r requirements.txt (line 6)) (3.7.1)\n", + "Requirement already satisfied: distro<2,>=1.7.0 in /usr/lib/python3/dist-packages (from openai==1.3.3->-r requirements.txt (line 6)) (1.7.0)\n", + "Collecting httpx<1,>=0.23.0 (from openai==1.3.3->-r requirements.txt (line 6))\n", + " Downloading httpx-0.26.0-py3-none-any.whl (75 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m75.9/75.9 kB\u001b[0m \u001b[31m7.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: pydantic<3,>=1.9.0 in /usr/local/lib/python3.10/dist-packages (from openai==1.3.3->-r requirements.txt (line 6)) (1.10.13)\n", + "Requirement already satisfied: tqdm>4 in /usr/local/lib/python3.10/dist-packages (from openai==1.3.3->-r requirements.txt (line 6)) (4.66.1)\n", + "Requirement already satisfied: typing-extensions<5,>=4.5 in /usr/local/lib/python3.10/dist-packages (from openai==1.3.3->-r requirements.txt (line 6)) (4.5.0)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests==2.31.0->-r requirements.txt (line 8)) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests==2.31.0->-r requirements.txt (line 8)) (3.6)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests==2.31.0->-r requirements.txt (line 8)) (2.0.7)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests==2.31.0->-r requirements.txt (line 8)) (2023.11.17)\n", + "Collecting distlib<1,>=0.3.6 (from virtualenv==20.23.0->-r requirements.txt (line 11))\n", + " Downloading distlib-0.3.8-py2.py3-none-any.whl (468 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m468.9/468.9 kB\u001b[0m \u001b[31m29.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: filelock<4,>=3.11 in /usr/local/lib/python3.10/dist-packages (from virtualenv==20.23.0->-r requirements.txt (line 11)) (3.13.1)\n", + "Collecting platformdirs<4,>=3.2 (from virtualenv==20.23.0->-r requirements.txt (line 11))\n", + " Downloading platformdirs-3.11.0-py3-none-any.whl (17 kB)\n", + "Requirement already satisfied: MarkupSafe>=2.1.1 in /usr/local/lib/python3.10/dist-packages (from Werkzeug==2.3.6->-r requirements.txt (line 12)) (2.1.3)\n", + "Requirement already satisfied: sniffio>=1.1 in /usr/local/lib/python3.10/dist-packages (from anyio<4,>=3.5.0->openai==1.3.3->-r requirements.txt (line 6)) (1.3.0)\n", + "Requirement already satisfied: exceptiongroup in /usr/local/lib/python3.10/dist-packages (from anyio<4,>=3.5.0->openai==1.3.3->-r requirements.txt (line 6)) (1.2.0)\n", + "Collecting httpcore==1.* (from httpx<1,>=0.23.0->openai==1.3.3->-r requirements.txt (line 6))\n", + " Downloading httpcore-1.0.2-py3-none-any.whl (76 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m76.9/76.9 kB\u001b[0m \u001b[31m8.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting h11<0.15,>=0.13 (from httpcore==1.*->httpx<1,>=0.23.0->openai==1.3.3->-r requirements.txt (line 6))\n", + " Downloading h11-0.14.0-py3-none-any.whl (58 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m58.3/58.3 kB\u001b[0m \u001b[31m6.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: 
bidict>=0.21.0 in /usr/local/lib/python3.10/dist-packages (from python-socketio>=5.0.2->Flask-SocketIO==5.3.4->-r requirements.txt (line 3)) (0.22.1)\n", + "Collecting python-engineio>=4.8.0 (from python-socketio>=5.0.2->Flask-SocketIO==5.3.4->-r requirements.txt (line 3))\n", + " Downloading python_engineio-4.8.0-py3-none-any.whl (56 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m56.8/56.8 kB\u001b[0m \u001b[31m6.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting simple-websocket>=0.10.0 (from python-engineio>=4.8.0->python-socketio>=5.0.2->Flask-SocketIO==5.3.4->-r requirements.txt (line 3))\n", + " Downloading simple_websocket-1.0.0-py3-none-any.whl (13 kB)\n", + "Collecting wsproto (from simple-websocket>=0.10.0->python-engineio>=4.8.0->python-socketio>=5.0.2->Flask-SocketIO==5.3.4->-r requirements.txt (line 3))\n", + " Downloading wsproto-1.2.0-py3-none-any.whl (24 kB)\n", + "Installing collected packages: distlib, Werkzeug, tenacity, platformdirs, Pillow, numpy, Markdown, importlib-metadata, h11, colorama, blinker, wsproto, virtualenv, tiktoken, httpcore, Flask, simple-websocket, httpx, python-engineio, openai, python-socketio, Flask-SocketIO\n", + " Attempting uninstall: Werkzeug\n", + " Found existing installation: Werkzeug 3.0.1\n", + " Uninstalling Werkzeug-3.0.1:\n", + " Successfully uninstalled Werkzeug-3.0.1\n", + " Attempting uninstall: tenacity\n", + " Found existing installation: tenacity 8.2.3\n", + " Uninstalling tenacity-8.2.3:\n", + " Successfully uninstalled tenacity-8.2.3\n", + " Attempting uninstall: platformdirs\n", + " Found existing installation: platformdirs 4.1.0\n", + " Uninstalling platformdirs-4.1.0:\n", + " Successfully uninstalled platformdirs-4.1.0\n", + " Attempting uninstall: Pillow\n", + " Found existing installation: Pillow 9.4.0\n", + " Uninstalling Pillow-9.4.0:\n", + " Successfully uninstalled Pillow-9.4.0\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.23.5\n", + " Uninstalling numpy-1.23.5:\n", + " Successfully uninstalled numpy-1.23.5\n", + " Attempting uninstall: Markdown\n", + " Found existing installation: Markdown 3.5.1\n", + " Uninstalling Markdown-3.5.1:\n", + " Successfully uninstalled Markdown-3.5.1\n", + " Attempting uninstall: importlib-metadata\n", + " Found existing installation: importlib-metadata 7.0.0\n", + " Uninstalling importlib-metadata-7.0.0:\n", + " Successfully uninstalled importlib-metadata-7.0.0\n", + " Attempting uninstall: blinker\n", + " Found existing installation: blinker 1.4\n", + "\u001b[31mERROR: Cannot uninstall 'blinker'. 
It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall.\u001b[0m\u001b[31m\n", + "\u001b[0m" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + "!pip3 install openai==0.28 tiktoken tenacity" + ], + "metadata": { + "id": "3JKXhs5GP3G9", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "941099cb-3f89-40b2-8235-7347b043e17a" + }, + "id": "3JKXhs5GP3G9", + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Collecting openai==0.28\n", + " Downloading openai-0.28.0-py3-none-any.whl (76 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m76.5/76.5 kB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tiktoken\n", + " Downloading tiktoken-0.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (2.0 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m2.0/2.0 MB\u001b[0m \u001b[31m39.0 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hRequirement already satisfied: tenacity in /usr/local/lib/python3.10/dist-packages (8.2.2)\n", + "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.10/dist-packages (from openai==0.28) (2.31.0)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from openai==0.28) (4.66.1)\n", + "Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from openai==0.28) (3.9.1)\n", + "Requirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.10/dist-packages (from tiktoken) (2023.6.3)\n", + "Requirement already satisfied: charset-normalizer<4,>=2 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai==0.28) (3.3.2)\n", + "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai==0.28) (3.6)\n", + "Requirement already satisfied: urllib3<3,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai==0.28) (2.0.7)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai==0.28) (2023.11.17)\n", + "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (23.1.0)\n", + "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (6.0.4)\n", + "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (1.9.4)\n", + "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (1.4.1)\n", + "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (1.3.1)\n", + "Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai==0.28) (4.0.3)\n" + ] + } + ] + }, + { + "cell_type": "code", + "source": [ + " # @title (Please insert your request to be done by this code at below form:👇👇)\n", + "Topic = \"Simulating Social Movements with Python and ChatArena\" # @param {type:\"string\"}\n", + "PARAGRAPH = \"Title: \\\"Comparing Common Scenes: Iran and the USA\\\" Introduction: In this post, we will compare the common scenes of Iran and the USA, focusing on various 
aspects such as political systems, religion, human rights, economic systems, education, healthcare, gender equality, media freedom, technology development, and cultural diversity. Additionally, we will explore the concept of Iran's desire to become free from religion and embrace a secular system. 1. Political System: Iran: Iran aims to achieve freedom from Islamofacism, preserving its Islamic identity while striving for political stability and governance. USA: The USA emphasizes freedom from monarchy, establishing a democratic republic that values individual rights and democratic principles. 2. Religion: Iran: Iran is predominantly Islamic, with Islam playing a significant role in its culture, society, and governance. USA: The USA upholds freedom of religion, allowing individuals to practice any religion of their choice, promoting religious diversity and tolerance. 3. Human Rights: Iran: Human rights in Iran vary, with limited freedom of expression and certain restrictions on civil liberties. USA: The USA strongly protects and promotes human rights, ensuring fundamental freedoms and civil liberties for its citizens. 4. Economic System: Iran: Iran follows a mixed economy, combining elements of state control and private enterprise. USA: The USA operates under a capitalist economy, emphasizing free-market principles and private enterprise. 5. Education: Iran: Iran has both public and private schools available, providing educational opportunities to its population. USA: The USA offers extensive public and private education systems, focusing on providing quality education to its citizens. 6. Healthcare: Iran: Iran has a universal healthcare system, aiming to provide healthcare access to all its citizens. USA: The USA has a mixed healthcare system, combining public and private options to ensure healthcare coverage for its population. 7. Gender Equality: Iran: Gender equality in Iran is limited, with traditional gender roles prevailing in society. USA: The USA places a strong emphasis on gender equality, striving for equal rights and opportunities for all genders. 8. Media Freedom: Iran: Media freedom in Iran is restricted, with limited press freedom and certain regulations on media outlets. USA: The USA guarantees freedom of the press and speech, promoting a free and independent media landscape. 9. Technology Development: Iran: Iran is actively developing its technology sector, focusing on advancements and innovation in various fields. USA: The USA is known for its advanced technology and innovation, leading in technological advancements globally. 10. Cultural Diversity: Iran: Iran boasts a rich cultural heritage, with diverse traditions, customs, and historical influences. USA: The USA is a melting pot of diverse cultures, embracing cultural diversity and celebrating multiculturalism. Conclusion: The comparison of common scenes between Iran and the USA highlights the differences and similarities in various aspects of their societies. While Iran aims to preserve its Islamic identity, there is also a growing desire for freedom from religion and the adoption of a secular system. Understanding these common scenes helps us appreciate the unique characteristics and aspirations of each country. 
Disclaimer: This post is based on general observations and may not capture all aspects or nuances of the common scenes in Iran and the USA.\" # @param {type:\"string\"}\n", + "role = \"game theory code writing by ChatArena \"# @param {type:\"string\"}\n", + "\n", + "\n", + "Your_Email = \"hh@gmail.com\" # @param {type:\"string\"}\n", + "\n", + "openai_api = \"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\" # @param {type:\"string\"}\n", + "\n", + "\n", + "#!export OPENAI_API_KEY = openai_api\n", + "\n", + "import os\n", + "\n", + "os.environ['OPENAI_API_KEY'] = openai_api #'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'" + ], + "metadata": { + "id": "6dGOPiCyOzRx" + }, + "id": "6dGOPiCyOzRx", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "!pip install huggingface\n", + "!sudo apt install git-lfs\n", + "!git lfs install" + ], + "metadata": { + "id": "-AecMpQ5lKP4" + }, + "id": "-AecMpQ5lKP4", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "\n", + "\n", + "!huggingface-cli login --token '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'\n", + "\n", + "!git remote add space https://huggingface.co/spaces/your_HF_USERNAME/your_SPACE_NAME\n", + "!git push --force space main" + ], + "metadata": { + "id": "rwbieSlIlCLt" + }, + "id": "rwbieSlIlCLt", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "source": [ + "\n", + "\n", + "\n", + "\n", + "https://www.phind.com/search?cache=esnrf1bblvwbp4q4sp3drbs6\n", + "\n", + "Based on your project description and the search results, it seems like you want to generate a PowerPoint presentation from the results of your simulation using the ChatDev software.\n", + "\n", + "According to the ChatDev documentation, you can specify the product modality as PowerPoint in the task description. You can do this by adding ` PowerPoint` at the end of your task description.\n", + "\n", + "Here's how you can update the command:\n", + "\n", + "```bash\n", + "python3 run.py --task \"Use game theory to understand how different personality types (Dark Triad, Dark Tetrad, Dark Empathy) might interact within a society PowerPoint\" --name \"PersonalityInteractions\"\n", + "```\n", + "\n", + "This command will initiate the building of your software with the task description being the application of game theory to understand the interactions of different personality types and the final product modality being PowerPoint. The project name is \"PersonalityInteractions\".\n", + "\n", + "Remember to replace `\"PersonalityInteractions\"` with your desired project name. Also, you need to make sure that you have defined the roles and behaviors of the personality types in your project configuration.\n", + "\n", + "This command will create a corresponding folder in the WareHouse directory, which will contain all the files and documents related to the software, configuration files of the company responsible for this software, a comprehensive log detailing the software's building process, and the initial prompt used to create this software [Source 1](https://github.com/OpenBMB/ChatDev).\n", + "\n", + "After the software is built and run, you should be able to find a PowerPoint file in the WareHouse directory under the specific project folder, such as `project_name_DefaultOrganization_timestamp`. This PowerPoint file will contain the results of your simulation [Source 1](https://github.com/OpenBMB/ChatDev)." 
+ ], + "metadata": { + "id": "D3OrVwWOvOf7" + }, + "id": "D3OrVwWOvOf7" + }, + { + "cell_type": "code", + "source": [ + "#!python3 run.py --task \"[description_of_your_idea]\" --name \"[project_name]\"\n", + "\n", + "#!python3 run.py --task \"theory to understand how different personality types (Dark Triad, Dark Tetrad, Dark Empathy) might interact within a society PowerPoint\" --name \"PersonalityInteractions\"" + ], + "metadata": { + "id": "fvanVNknPQAP" + }, + "id": "fvanVNknPQAP", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "#%cd /content/ChatDev\n", + "#TOPIC = 'business plan chatgpt'\n", + "#PARAGRAPH= 'creat one Openai ChatGP pyhton program as webpage which be run in Huggingface space by docker file and recive the openai API and the the two topic and description variable and then act like AutoGPT but for writing the Business Plan report with around 10 prompt and then results must be saved as standard business plan with title , subtitle and numbering the results of the 10 prompt, so the official form of final repost is important and it must be in the form of pdf and doc saved and the Huggingface face mist have one email form to send this pdf and doc file to the given email of user'\n", + "!python3 run.py --task f\"\"\"{PARAGRAPH}\"\"\" --name f\"\"\"{Topic}\"\"\"" + ], + "metadata": { + "id": "OPdqUwuWjNv0" + }, + "id": "OPdqUwuWjNv0", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "\n", + "src = '/content/ChatDev/WareHouse/'\n", + "\n", + "scr2 = f'{src}'+'/chatdev2huggingface_DefaultOrganization_20231212154538'\n", + "%cd '{scr2}'\n", + "!pwd\n", + "!ls\n", + "\n", + "!python3 -m venv env\n", + "!source env/bin/activate\n", + "!pip3 install -r requirements.txt\n", + "!python3 '{scr2}/'main.py" + ], + "metadata": { + "id": "Gccm9EZGUgKj" + }, + "id": "Gccm9EZGUgKj", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "\n", + "!pwd\n", + "!python3 '{scr2}/'main.py" + ], + "metadata": { + "id": "fnLQ15g-wluo" + }, + "id": "fnLQ15g-wluo", + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "source": [ + "\n", + "dst = '/content/drive/MyDrive/ChatGPT_Paper_wrting/ChatDev'\n", + "if not os.path.exists(dst):\n", + " os.makedirs(dst)\n", + "!cp -r '{src}' '{dst}'\n", + "\n", + "#src = '/content/ChatDev/ChatDev/WareHouse/PersonalityInteractions_DefaultOrganization_20231212111412'\n", + "\n", + "import shutil\n", + "#shutil.copytree(src, dst)" + ], + "metadata": { + "id": "lrjMzdKmyzIB" + }, + "id": "lrjMzdKmyzIB", + "execution_count": null, + "outputs": [] + } + ], + "metadata": { + "colab": { + "provenance": [], + "include_colab_link": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.13" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} \ No newline at end of file
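
The `APIRemovedInV1` traceback recorded in `API_8_3_Financial_Model_Prompt.ipynb` comes from calling `openai.Completion.create` while `openai==1.3.8` is installed. Besides the `pip install openai==0.28` pin suggested in the error message (and used later in `ChatDev_1.ipynb`), the `ChatGP.generate_response` method can be ported to the v1 client interface. The sketch below is a minimal, hedged example that assumes a chat model such as `gpt-3.5-turbo` is an acceptable substitute for the retired `text-davinci-003` completion endpoint:

```python
from openai import OpenAI

# The v1 client reads OPENAI_API_KEY from the environment,
# which the notebook sets via os.environ earlier.
client = OpenAI()

def generate_response(prompt: str, topic: str, description: str) -> str:
    """Sketch of ChatGP.generate_response ported to the openai>=1.0 interface."""
    response = client.chat.completions.create(
        model="gpt-3.5-turbo",  # assumed replacement for text-davinci-003
        messages=[{"role": "user", "content": f"{prompt} {topic} {description}"}],
        max_tokens=300,
    )
    return response.choices[0].message.content.strip()
```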
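A related weak point in `ChatGP.save_business_plan`: `pdfkit` wraps `wkhtmltopdf` and converts HTML (or URLs) to PDF, so `pdfkit.from_file('business_plan.docx', 'business_plan.pdf')` will not render the generated Word document correctly. One hedged alternative, assuming LibreOffice is available in the Colab or Docker image (for example via `apt-get install -y libreoffice`), is a headless conversion:

```python
import subprocess

def docx_to_pdf(docx_path: str, out_dir: str = ".") -> None:
    # LibreOffice converts .docx to .pdf in headless mode; the output is written
    # to out_dir with the same base name and a .pdf extension.
    subprocess.run(
        ["libreoffice", "--headless", "--convert-to", "pdf", docx_path, "--outdir", out_dir],
        check=True,
    )

docx_to_pdf("business_plan.docx")
```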
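Finally, the cell in `ChatDev_1.ipynb` that launches `run.py` passes the task as `--task f"""{PARAGRAPH}"""`. IPython substitutes `{PARAGRAPH}` inside `!` commands, but the surrounding `f"""` markers are handed to the shell literally, and any quote characters inside the long task description can break the argument boundaries. A safer sketch, run from the cloned `ChatDev` directory and using the `PARAGRAPH` and `Topic` variables defined earlier in the notebook, passes an argument list so no shell quoting is involved:

```python
import subprocess

# Run ChatDev with the task description and project name taken from the
# notebook's form variables; an argument list avoids shell re-parsing of the
# quotes inside the long PARAGRAPH string.
subprocess.run(
    ["python3", "run.py", "--task", PARAGRAPH, "--name", Topic],
    check=True,
)
```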