Bladeren bron

Small text fixes

Artem Chumachenko 2 jaren geleden
bovenliggende
commit
749a498c1f
3 gewijzigde bestanden met toevoegingen van 12 en 14 verwijderingen
  1. 1 1
      README.md
  2. 7 8
      examples/prompt-tuning-personachat.ipynb
  3. 4 5
      examples/prompt-tuning-sst2.ipynb

+ 1 - 1
README.md

@@ -155,7 +155,7 @@ loss.backward()
 print("Gradients (norm):", model.transformer.word_embeddings.weight.grad.norm())
 ```
 
-Of course, this is a simplified code snippet. For actual training, see our example on "deep" prompt-tuning here.
+Of course, this is a simplified code snippet. For actual training, see the example notebooks with "deep" prompt-tuning:
 - Simple text semantic classification: [examples/prompt-tuning-sst2.ipynb](./examples/prompt-tuning-sst2.ipynb).
 - A personified chatbot: [examples/prompt-tuning-personachat.ipynb](./examples/prompt-tuning-personachat.ipynb).
 

+ 7 - 8
examples/prompt-tuning-personachat.ipynb

@@ -33,7 +33,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# This block is only need for colab users. It will change nothing if you are running this notebook locally.\n",
     "import subprocess\n",
     "import sys\n",
     "\n",
@@ -41,14 +40,14 @@
     "IN_COLAB = 'google.colab' in sys.modules\n",
     "\n",
     "if IN_COLAB:\n",
-    "    subprocess.run(['git', 'clone', 'https://github.com/bigscience-workshop/petals'])\n",
-    "    subprocess.run(['pip', 'install', '-r', 'petals/requirements.txt'])\n",
-    "    subprocess.run(['pip', 'install', 'datasets', 'lib64'])\n",
+    "    subprocess.run(\"git clone https://github.com/bigscience-workshop/petals\", shell=True)\n",
+    "    subprocess.run(\"pip install -r petals/requirements.txt\", shell=True)\n",
+    "    subprocess.run(\"pip install datasets wandb\", shell=True)\n",
     "\n",
     "    try:\n",
     "        subprocess.check_output([\"nvidia-smi\", \"-L\"])\n",
     "    except subprocess.CalledProcessError as e:\n",
-    "        subprocess.run(['rm', '-r', '/usr/local/cuda/lib64'])\n",
+    "        subprocess.run(\"rm -r /usr/local/cuda/lib64\", shell=True)\n",
     "\n",
     "    sys.path.insert(0, './petals/')\n",
     "else:\n",
@@ -315,7 +314,7 @@
  ],
  "metadata": {
   "kernelspec": {
-   "display_name": "Python 3.8.10 64-bit",
+   "display_name": "Python 3.8.0 ('petals')",
    "language": "python",
    "name": "python3"
   },
@@ -329,11 +328,11 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.8.9"
+   "version": "3.8.0"
   },
   "vscode": {
    "interpreter": {
-    "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
+    "hash": "a303c9f329a09f921588ea6ef03898c90b4a8e255a47e0bd6e36f6331488f609"
    }
   }
  },

+ 4 - 5
examples/prompt-tuning-sst2.ipynb

@@ -33,7 +33,6 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "# This block is only need for colab users. It will change nothing if you are running this notebook locally.\n",
     "import subprocess\n",
     "import sys\n",
     "\n",
@@ -41,14 +40,14 @@
     "IN_COLAB = 'google.colab' in sys.modules\n",
     "\n",
     "if IN_COLAB:\n",
-    "    subprocess.run(['git', 'clone', 'https://github.com/bigscience-workshop/petals'])\n",
-    "    subprocess.run(['pip', 'install', '-r', 'petals/requirements.txt'])\n",
-    "    subprocess.run(['pip', 'install', 'datasets', 'lib64'])\n",
+    "    subprocess.run(\"git clone https://github.com/bigscience-workshop/petals\", shell=True)\n",
+    "    subprocess.run(\"pip install -r petals/requirements.txt\", shell=True)\n",
+    "    subprocess.run(\"pip install datasets wandb\", shell=True)\n",
     "\n",
     "    try:\n",
     "        subprocess.check_output([\"nvidia-smi\", \"-L\"])\n",
     "    except subprocess.CalledProcessError as e:\n",
-    "        subprocess.run(['rm', '-r', '/usr/local/cuda/lib64'])\n",
+    "        subprocess.run(\"rm -r /usr/local/cuda/lib64\", shell=True)\n",
     "\n",
     "    sys.path.insert(0, './petals/')\n",
     "else:\n",