diff --git a/Assignment-3/Assignment_3_Team_5.ipynb b/Assignment-3/Assignment_3_Team_5.ipynb deleted file mode 100644 index 9c6277a..0000000 --- a/Assignment-3/Assignment_3_Team_5.ipynb +++ /dev/null @@ -1,111 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Assignment-3_Team-5.ipynb", - "provenance": [], - "collapsed_sections": [], - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "X2CQg6VH856T", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "843ae1bf-2d27-4eba-a68e-46e6535d2e17" - }, - "source": [ - "import numpy as np\n", - "import random\n", - "rate=0.1\n", - "def relu(x):\n", - " return x * (x > 0)\n", - "\n", - "def drelu(x):\n", - " return 1. * (x > 0)\n", - "\n", - "def sigmoid(A):\n", - " A=1/np.float128((1+np.exp(-A)))\n", - " return A\n", - "\n", - "Wset=[0]\n", - "Bset=[0]\n", - "layers=20 #####change the layers here\n", - "nodes=20 #####change the nodes here\n", - "Wset.append(np.random.rand(nodes,2)*0.1)\n", - "Bset.append(np.random.rand(nodes,1)*0.1)\n", - "for i in range(layers-2):\n", - " Wset.append(np.random.rand(nodes,nodes)*0.1)\n", - " Bset.append(np.random.rand(nodes,1)*0.1)\n", - "Wset.append(np.random.rand(1,nodes)*0.1)\n", - "Bset.append(np.random.rand(1,1)*0.1)\n", - "X=np.array([[0,0,1,1],[0,1,0,1]],dtype=np.float128)\n", - "A=[0]*(layers+1)\n", - "A[0]=X\n", - "Y=np.array([[0,0,0,1]],dtype=np.float128)\n", - "Zset=[0]*(layers+1)\n", - "dZ=[0]*(layers+1)\n", - "for i in range(0,1000000):\n", - " #Forward\n", - " # print(i)\n", - " for j in range(1,layers+1):\n", - " Zset[j]=np.dot(Wset[j],A[j-1])+Bset[j]\n", - " if j==layers:\n", - " A[j]=sigmoid(Zset[j])\n", - " else:\n", - " A[j]=relu(Zset[j])\n", - " # print(A[layers-1])\n", - " #Backward\n", - " for j in range(layers-1,-1,-1):\n", - " if j==(layers-1):\n", - " dZ[layers]=A[layers]-Y\n", - " #print(dZ[3])\n", - " dW=(np.dot(dZ[j+1],A[j].T))/4\n", - " dB=(np.sum(dZ[j+1],axis=1,keepdims=True))/4\n", - " Wset[j+1]=Wset[j+1]-rate*dW\n", - " Bset[j+1]=Bset[j+1]-rate*dB\n", - " dZ[j]=np.multiply(np.dot(Wset[j+1].T,dZ[j+1]),drelu(Zset[j]))\n", - "# print(\"hello\")\n", - "V=X\n", - "for i in range(1,layers):\n", - " V=np.dot(Wset[i],V)+Bset[i]\n", - " V=relu(V)\n", - "V=np.dot(Wset[layers],V)+Bset[layers]\n", - "V=sigmoid(V)\n", - "print(V)" - ], - "execution_count": 31, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[[3.82910398e-22 3.55526951e-09 3.34088652e-09 9.99998750e-01]]\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment-3/WEEK-2 Submission by TEAM 3 b/Assignment-3/WEEK-2 Submission by TEAM 3 deleted file mode 100644 index 7aa4bc8..0000000 --- a/Assignment-3/WEEK-2 Submission by TEAM 3 +++ /dev/null @@ -1,190 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "WEEK 2 Submission by TEAM3.ipynb", - "provenance": [], - "authorship_tag": "ABX9TyOZSbwxERcyR9Q2gvXuzmnV", - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ 
- "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Bs6KQXq9vPi-" - }, - "source": [ - "# ***WEEK 2 ASSIGNMENT SUBMISSION***\n", - "\n", - "***TEAM 3***\n", - "\n", - "\n", - "> *- Shreyasi Mandal*\n", - "\n", - "> *- Anushka Panda*\n", - "\n", - "> *- Lakshmi Pravallika*\n", - "\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "T08WYF97tZse" - }, - "source": [ - "# ***AND GATE USING NEURAL NETWORK***\n", - "\n", - "*We are going to develop an AND gate using an artificial neural network*\n", - "\n", - "***Some Terms***\n", - "\n", - "\n", - "* *Forward Propagation is used to compute the activations from the input, then hidden then the output layer* \n", - "* *What a Neural Network does is the same as logistic regression*\n", - "* *In Back-Propagation we start by computing the delta term for the output layer and then back-propagate to compute the delta terms of the hidden layers*\n", - "\n", - "\n", - "\n", - "\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "HKvDYWzggpE1", - "outputId": "f5a676fa-70fc-49e0-80a3-0708534cf0ec" - }, - "source": [ - "# import Python Libraries\n", - "import numpy as np\n", - "from matplotlib import pyplot as plt\n", - "\n", - "# Sigmoid Function to find out hypothesis\n", - "def sigmoid(z):\n", - "\treturn 1 / (1 + np.exp(-z))\n", - "\n", - "# Initialization of the neural network parameters\n", - "# Initialized all the weights in the range of between 0 and 1\n", - "# Bias values are initialized to 0\n", - "def initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures):\n", - "\tW1 = np.random.randn(neuronsInHiddenLayers, inputFeatures)\n", - "\tW2 = np.random.randn(outputFeatures, neuronsInHiddenLayers)\n", - "\tb1 = np.zeros((neuronsInHiddenLayers, 1))\n", - "\tb2 = np.zeros((outputFeatures, 1))\n", - "\t\n", - "\tparameters = {\"W1\" : W1, \"b1\": b1,\n", - "\t\t\t\t\"W2\" : W2, \"b2\": b2}\n", - "\t\n", - " return parameters\n", - "\n", - "# Forward Propagation in Neural Networks\n", - "def forwardPropagation(X, Y, parameters):\n", - "\tm = X.shape[1]\n", - "\tW1 = parameters[\"W1\"]\n", - "\tW2 = parameters[\"W2\"]\n", - "\tb1 = parameters[\"b1\"]\n", - "\tb2 = parameters[\"b2\"]\n", - "\n", - "\tZ1 = np.dot(W1, X) + b1 # Performing multiplication in matrices\n", - "\tA1 = sigmoid(Z1) # Calling the sigmoid function\n", - "\tZ2 = np.dot(W2, A1) + b2\n", - "\tA2 = sigmoid(Z2)\n", - "\n", - "\tcache = (Z1, A1, W1, b1, Z2, A2, W2, b2)\n", - "\tlogprobs = np.multiply(np.log(A2), Y) + np.multiply(np.log(1 - A2), (1 - Y))\n", - "\tcost = -np.sum(logprobs) / m\n", - "\treturn cost, cache, A2\n", - "\n", - "# Backward Propagation to compute partial derivatives (aka gradients)\n", - "def backwardPropagation(X, Y, cache):\n", - "\tm = X.shape[1]\n", - "\t(Z1, A1, W1, b1, Z2, A2, W2, b2) = cache\n", - "\t\n", - "\tdZ2 = A2 - Y\n", - "\tdW2 = np.dot(dZ2, A1.T) / m\n", - "\tdb2 = np.sum(dZ2, axis = 1, keepdims = True)\n", - "\t\n", - "\tdA1 = np.dot(W2.T, dZ2)\n", - "\tdZ1 = np.multiply(dA1, A1 * (1- A1))\n", - "\tdW1 = np.dot(dZ1, X.T) / m\n", - "\tdb1 = np.sum(dZ1, axis = 1, keepdims = True) / m\n", - "\t\n", - "\tgradients = {\"dZ2\": dZ2, \"dW2\": dW2, \"db2\": db2,\n", - "\t\t\t\t\"dZ1\": dZ1, \"dW1\": dW1, \"db1\": db1}\n", - "\treturn gradients\n", - "\n", - "# Updating the weights based on the negative gradients\n", - "def updateParameters(parameters, gradients, learningRate):\n", - 
"\tparameters[\"W1\"] = parameters[\"W1\"] - learningRate * gradients[\"dW1\"]\n", - "\tparameters[\"W2\"] = parameters[\"W2\"] - learningRate * gradients[\"dW2\"]\n", - "\tparameters[\"b1\"] = parameters[\"b1\"] - learningRate * gradients[\"db1\"]\n", - "\tparameters[\"b2\"] = parameters[\"b2\"] - learningRate * gradients[\"db2\"]\n", - "\treturn parameters\n", - "\n", - "# Model to learn the AND truth table\n", - "X = np.array([[0, 0, 1, 1], [0, 1, 0, 1]]) # AND input\n", - "Y = np.array([[0, 0, 0, 1]]) # AND output\n", - "\n", - "# Define model parameters\n", - "neuronsInHiddenLayers = 2 # number of hidden layer neurons (2)\n", - "inputFeatures = X.shape[0] # number of input features (2)\n", - "outputFeatures = Y.shape[0] # number of output features (1)\n", - "parameters = initializeParameters(inputFeatures, neuronsInHiddenLayers, outputFeatures)\n", - "epoch = 100000\n", - "learningRate = 0.01\n", - "losses = np.zeros((epoch, 1))\n", - "\n", - "for i in range(epoch):\n", - "\tlosses[i, 0], cache, A2 = forwardPropagation(X, Y, parameters)\n", - "\tgradients = backwardPropagation(X, Y, cache)\n", - "\tparameters = updateParameters(parameters, gradients, learningRate)\n", - "\n", - "# Testing\n", - "X = np.array([[1, 1, 0, 0], [0, 1, 0, 1]]) # AND input\n", - "cost, _, A2 = forwardPropagation(X, Y, parameters)\n", - "prediction = (A2 > 0.5) * 1.0\n", - "# print(A2)\n", - "print(prediction)\n" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[[0. 1. 0. 0.]]\n" - ], - "name": "stdout" - } - ] - } - ] -} - - diff --git a/Assignment-3/assignment_3_Team_1.ipynb b/Assignment-3/assignment_3_Team_1.ipynb deleted file mode 100644 index 3bc6036..0000000 --- a/Assignment-3/assignment_3_Team_1.ipynb +++ /dev/null @@ -1,250 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "assignment-3_Team-1.ipynb", - "provenance": [], - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "6czqRVjVk9bv" - }, - "source": [ - "import numpy as np\n", - "import matplotlib.pyplot as plt" - ], - "execution_count": 2, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "_3_UMn3tsLpG" - }, - "source": [ - "def sigmoid(Z):\n", - " return 1/(1+np.exp(-Z))\n", - " \n", - " \n", - "def initialize_parameters(n_x, n_h, n_y):\n", - " W1 = np.random.randn(n_h, n_x)*0.01\n", - " b1 = np.zeros((n_h, 1))\n", - " W2 = np.random.randn(n_y, n_h)*0.01\n", - " b2 = np.zeros((n_y, 1))\n", - " \n", - " parameters = {\n", - " 'W1': W1,\n", - " 'b1': b1,\n", - " 'W2': W2,\n", - " 'b2': b2\n", - " }\n", - " return parameters\n", - " \n", - " \n", - "def forward_prop(X, parameters):\n", - " W1 = parameters['W1']\n", - " W2 = parameters['W2']\n", - " b1 = parameters['b1']\n", - " b2 = parameters['b2']\n", - " \n", - " Z1 = np.dot(W1, X) + b1\n", - " A1 = np.tanh(Z1)\n", - " Z2 = np.dot(W2, A1) + b2\n", - " A2 = sigmoid(Z2)\n", - " \n", - " # assert(A2.shape == (1, X.shape[1]))\n", - " \n", - " cache = {\n", - " 'Z1': Z1,\n", - " 'A1': A1,\n", - " 'Z2': Z2,\n", - " 'A2': A2\n", - " }\n", - " return A2, cache\n", - " \n", - " \n", - "def compute_cost(A2, Y):\n", - " m = Y.shape[1]\n", - " \n", - " cost = -np.sum(np.multiply(Y, np.log(A2)) + 
np.multiply(1-Y, np.log(1-A2))) / m\n", - " cost = float(np.squeeze(cost))\n", - " return cost\n", - " \n", - " \n", - "def back_prop(cache, parameters, X, Y):\n", - " A1 = cache['A1']\n", - " A2 = cache['A2']\n", - " W2 = parameters['W2']\n", - " \n", - " m = X.shape[1]\n", - " \n", - " dZ2 = A2 - Y\n", - " dW2 = np.dot(dZ2, A1.T) / m\n", - " db2 = np.sum(dZ2, axis=1, keepdims = True) / m\n", - " dZ1 = np.multiply(np.dot(W2.T, dZ2), 1-np.power(A1, 2))\n", - " dW1 = np.dot(dZ1, X.T) / m\n", - " db1 = np.sum(dZ1, axis=1, keepdims=True) / m\n", - " \n", - " grads = {\n", - " 'dW1': dW1,\n", - " 'db1': db1,\n", - " 'dW2': dW2,\n", - " 'db2': db2\n", - " }\n", - " return grads\n", - " \n", - " \n", - "def update_parameters(parameters, grads, learning_rate = 1.2):\n", - " W1 = parameters['W1']\n", - " W2 = parameters['W2']\n", - " b1 = parameters['b1']\n", - " b2 = parameters['b2']\n", - " \n", - " dW1 = grads['dW1']\n", - " dW2 = grads['dW2']\n", - " db1 = grads['db1']\n", - " db2 = grads['db2']\n", - " \n", - " W1 = W1 - learning_rate * dW1\n", - " W2 = W2 - learning_rate * dW2\n", - " b1 = b1 - learning_rate * db1\n", - " b2 = b2 - learning_rate * db2\n", - " \n", - " new_parameters = {\n", - " 'W1': W1,\n", - " 'b1': b1,\n", - " 'W2': W2,\n", - " 'b2': b2\n", - " }\n", - " return new_parameters\n", - " \n", - " \n", - "def nn_model(X, Y, num_iterations = 10000, print_cost=True):\n", - " np.random.seed(3)\n", - " \n", - " n_x, n_h, n_y = X.shape[0], 4, Y.shape[0];\n", - " parameters = initialize_parameters(n_x, n_h, n_y)\n", - " \n", - " for i in range(0, num_iterations):\n", - " A2, cache = forward_prop(X, parameters)\n", - " cost = compute_cost(A2, Y)\n", - " grads = back_prop(cache, parameters, X, Y)\n", - " parameters = update_parameters(parameters, grads)\n", - " \n", - " if print_cost and i%1000 == 0:\n", - " print('Cost after iteration %i: %f' %(i, cost))\n", - " \n", - " return parameters\n", - " \n", - " \n", - "def predict(parameters, X):\n", - " A2, cache = forward_prop(X, parameters)\n", - " yhat = A2\n", - " yhat = np.squeeze(yhat)\n", - " prediction = 0\n", - " if yhat > 0.5:\n", - " prediction = 1\n", - " \n", - " return prediction" - ], - "execution_count": 3, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "YSX9hyyH9jC5", - "outputId": "76d5bb01-98b6-462c-9c36-ef1a72127dcb" - }, - "source": [ - "X = np.array([[0, 0, 1, 1], [0, 1, 0, 1]])\n", - "Y = np.array([[0, 1, 1, 0]])\n", - "\n", - "trained_parameters = nn_model(X, Y)\n", - "\n", - "print(\"W1 = \" + str(trained_parameters[\"W1\"]))\n", - "print(\"b1 = \" + str(trained_parameters[\"b1\"]))\n", - "print(\"W2 = \" + str(trained_parameters[\"W2\"]))\n", - "print(\"b2 = \" + str(trained_parameters[\"b2\"]))" - ], - "execution_count": 17, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Cost after iteration 0: 0.693147\n", - "Cost after iteration 1000: 0.693147\n", - "Cost after iteration 2000: 0.693147\n", - "Cost after iteration 3000: 0.693147\n", - "Cost after iteration 4000: 0.693147\n", - "Cost after iteration 5000: 0.693147\n", - "Cost after iteration 6000: 0.693147\n", - "Cost after iteration 7000: 0.693147\n", - "Cost after iteration 8000: 0.693147\n", - "Cost after iteration 9000: 0.693147\n", - "W1 = [[ 0.01800884 0.00472305]\n", - " [-0.00106225 -0.01842699]\n", - " [-0.00302617 -0.00347474]\n", - " [-0.00045634 -0.00661726]]\n", - "b1 = [[ 1.28038051e-04]\n", - " [-5.31468643e-06]\n", - " [-3.99994857e-04]\n", - " [ 
1.05941642e-04]]\n", - "W2 = [[ 0.00143819 -0.00386175 -0.0129717 0.00915141]]\n", - "b2 = [[-7.01216554e-05]]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "AO6m8nXhZKrV", - "outputId": "c392854d-5477-4e87-e98e-a35732140872" - }, - "source": [ - "prediction = predict(trained_parameters, np.array([[0], [1]]))\n", - "print(prediction)" - ], - "execution_count": 19, - "outputs": [ - { - "output_type": "stream", - "text": [ - "0\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment-3/team2 (2).ipynb b/Assignment-3/team2 (2).ipynb deleted file mode 100644 index 3eafb10..0000000 --- a/Assignment-3/team2 (2).ipynb +++ /dev/null @@ -1,62 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "team2.ipynb", - "provenance": [], - "collapsed_sections": [] - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "code", - "metadata": { - "id": "oKxo5CjSsvPH" - }, - "source": [ - "import numpy as np\n", - " \n", - " \n", - "## MODEL\n", - " \n", - "x=np.array([[1,1], [1,0], [0,1], [0,0]])\n", - "y=np.array([1,0,0,0])\n", - "y=y.reshape(4,1)\n", - "c=0\n", - " \n", - "##TRAIN\n", - "for i in range(1000):\n", - " out= x[:, 0]+ x[:,1]+c\n", - " a=1/(1+np.exp(-out))\n", - " c=c- ((abs(a-y)>0.51)*(a-y)).sum()/10\n", - " \n", - "## OUT\n", - "a1,a2=input(\"Enter Nums :)\\n\").split()\n", - "a1=int(a1)\n", - "a2=int(a2)\n", - "print(np.exp(-(a1+a2+c))<1)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "lUo_P4102kqn" - }, - "source": [ - "" - ], - "execution_count": null, - "outputs": [] - } - ] -} \ No newline at end of file diff --git a/Assignment-5/Report.md b/Assignment-5/Report.md deleted file mode 100644 index e966e0d..0000000 --- a/Assignment-5/Report.md +++ /dev/null @@ -1,50 +0,0 @@ -# SUMMARY OF THE PAPER -## INTRODUCTION -This paper deals with the problem of simplifying the language, sentence structure , style of the original Wikipedia articles into a more readable -form. The *Flesch-Kincaid* index is used as the measure of readability. - -## CONCEPTS DEALT -**Encoder**
-The encoder used is the *Google Universal Sentence Encoder*, which converts each lexeme taken from the source into a high-dimensional vector that encapsulates the information contained in that lexeme.
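As a rough illustration of this step, the snippet below embeds two short phrases with the publicly released Universal Sentence Encoder. It is a minimal sketch assuming the `tensorflow` and `tensorflow_hub` packages and the public TF-Hub module URL; it is not the pipeline from the paper.

```python
# Minimal sketch: embed two phrases with the public Universal Sentence Encoder.
# Assumes `tensorflow` and `tensorflow_hub` are installed; not the authors' code.
import tensorflow_hub as hub

embed = hub.load("https://tfhub.dev/google/universal-sentence-encoder/4")
vectors = embed(["The cat sat on the mat.",
                 "A small feline rested on the rug."]).numpy()
print(vectors.shape)   # (2, 512): one 512-dimensional vector per input phrase
```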
- -**Lexeme Splitter**
-This is a heuristics-driven engine that splits the source text into lexemes, i.e. meaningful phrases or short sentences. The engine gives priority to sentence terminators such as (.), (;) and (:); when the length of a phrase exceeds the specified hyperparameter, it falls back on secondary terminators such as (,) and (-).
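A minimal sketch of such a splitting heuristic is given below. The terminator sets, the word-count cap, and the helper name are assumptions for illustration only, not the engine described in the paper.

```python
# Hypothetical illustration of the splitting heuristic (not the paper's engine).
PRIMARY = {".", ";", ":"}     # preferred terminators
SECONDARY = {",", "-"}        # fallback terminators once a phrase grows too long
MAX_WORDS = 25                # assumed cap on lexeme length in words

def split_lexemes(text, max_words=MAX_WORDS):
    """Greedily cut `text` into lexemes at primary terminators, falling back
    to secondary terminators when the current phrase exceeds `max_words`."""
    lexemes, current = [], []
    for token in text.split():
        current.append(token)
        last = token[-1]
        if last in PRIMARY or (len(current) > max_words and last in SECONDARY):
            lexemes.append(" ".join(current))
            current = []
    if current:                          # flush whatever is left
        lexemes.append(" ".join(current))
    return lexemes

print(split_lexemes("First clause; then a second clause, and finally the end."))
```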
- -**Text Generator**
-Here the Generative Pretrained Transformer-2 (GPT-2) is used. It is a language model which takes some source text as input and can generate text samples which are aligned with the source in meaning, style and content.
-Since this is a pretrained model, we do not need to train it for content, but fine-tuning is required to align the style. The text used for fine-tuning consists of existing Simple Wikipedia articles.
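For illustration only, the snippet below samples a few continuations from the off-the-shelf GPT-2 via the Hugging Face `transformers` pipeline; the prompt, sampling settings, and model name are assumptions, and no Simple Wikipedia fine-tuning is applied here.

```python
# Sketch: sample candidate continuations from stock GPT-2 (no fine-tuning here).
# Assumes the Hugging Face `transformers` package; settings are illustrative.
from transformers import pipeline

generator = pipeline("text-generation", model="gpt2")
samples = generator(
    "A cell is the basic unit of",   # stands in for the running context
    max_length=30,
    num_return_sequences=3,
    do_sample=True,
    temperature=0.7,
)
for s in samples:
    print(s["generated_text"])
```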
- -**Similarity Index**
-The similarity index between two vectors is calculated as their inner product, which for our purposes needs to be as high as possible.
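Concretely, if the encoder returns (approximately unit-length) vectors, the score is just an inner product, as in the small example below; the explicit normalisation step is an assumption added for readability.

```python
import numpy as np

def similarity(u, v):
    """Inner product of two embeddings, normalised here so the score lies in [-1, 1]."""
    u = np.asarray(u, dtype=float)
    v = np.asarray(v, dtype=float)
    return float(np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v)))

print(similarity([0.2, 0.9, 0.1], [0.25, 0.85, 0.05]))  # ~0.99 for near-parallel vectors
```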
- -**Rejection Pass Filter**
-In this architecture, GPT-2 generates a set of possible replacements for each source lexeme. The sentence encoder converts these candidate phrases and the source lexeme into high-dimensional vectors. The source lexeme is replaced only if the maximum similarity index between the source embedding and any of the candidate embeddings exceeds the hyperparameter τ; if no candidate satisfies this condition, the source lexeme is kept unchanged.
-
-## PSEUDO CODE
-```
-Input: content source s
-Output: simplified text st = ""
-repeat
-  Identify next lexeme l = process(s)
-  for i = 1 to nsamples do
-    Generate next sample xi = generate(st)
-    Save embedding score e(xi)
-  end for
-  if max(|e(xi) − e(l)|) > τ then
-    st = argmax(|e(xi) − e(l)|) + st
-  else
-    st = l + st
-  end if
-until source s is done
-```
-
-## ARCHITECTURE
-![Please look to another PR](https://github.com/parinayc20/ACA-Wikipedia-Simplifier/blob/main/Assignment-5/assignment-architecture.png)
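A loose Python rendering of the pseudo code above is sketched below. The `split_lexemes`, `generate_candidate`, `embed`, and `similarity` callables are hypothetical stand-ins for process(s), the GPT-2 sampler, and the sentence encoder, and the acceptance test follows the similarity-threshold reading from the Rejection Pass Filter paragraph rather than being a verbatim port.

```python
# Loose Python rendering of the pseudo code above (hypothetical helpers passed in;
# acceptance follows the similarity-threshold description, not a verbatim port).
def simplify(source, split_lexemes, generate_candidate, embed, similarity,
             tau, nsamples=10):
    """Replace each lexeme with its most similar generated candidate,
    keeping the original lexeme when no candidate clears the threshold tau."""
    simplified = []
    for lexeme in split_lexemes(source):                 # l = process(s)
        context = " ".join(simplified)                   # running simplified text st
        candidates = [generate_candidate(context) for _ in range(nsamples)]
        scores = [similarity(embed(c), embed(lexeme)) for c in candidates]
        best = max(range(nsamples), key=scores.__getitem__)
        simplified.append(candidates[best] if scores[best] > tau else lexeme)
    return " ".join(simplified)
```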
-
-## HYPER PARAMETERS
-τ is a rejection threshold (minimum acceptable similarity); {MIN, MINSOFT, MAX} are the unconditional minimum, the punctuation-marked minimum, and the maximum lexeme size in words, respectively; SEED is the number of lexemes provided as context; NSAMPLES is the number of candidates produced by the generator per lexeme; and t° is the generation temperature.
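For orientation, these hyperparameters could be collected into a single configuration object as below; every value shown is an illustrative placeholder, not a number taken from the paper.

```python
# Illustrative placeholder values only; the paper's actual settings are not reproduced here.
CONFIG = {
    "tau": 0.85,         # rejection threshold: minimum acceptable similarity
    "MIN": 4,            # unconditional minimum lexeme size (words)
    "MINSOFT": 10,       # punctuation-marked minimum lexeme size (words)
    "MAX": 30,           # maximum lexeme size (words)
    "SEED": 2,           # number of lexemes provided as context
    "NSAMPLES": 10,      # candidates produced by the generator per lexeme
    "temperature": 0.7,  # generation temperature t°
}
```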
- -## CONCLUSION -This model has a 37.7% replacement rate and an average of 4.712 increase in the readability index. The model performs better when the training material matches the context of the source. \ No newline at end of file diff --git a/Assignment-5/T4-Wikipedia_Simplifier-Summary.pdf b/Assignment-5/T4-Wikipedia_Simplifier-Summary.pdf new file mode 100644 index 0000000..716e540 Binary files /dev/null and b/Assignment-5/T4-Wikipedia_Simplifier-Summary.pdf differ diff --git a/Assignment-5/assignment-architecture.png b/Assignment-5/assignment-architecture.png deleted file mode 100644 index 746c5c4..0000000 Binary files a/Assignment-5/assignment-architecture.png and /dev/null differ diff --git a/Assignment_1_and_2/Assignment_1_Team_5.ipynb b/Assignment_1_and_2/Assignment_1_Team_5.ipynb deleted file mode 100644 index c742ebc..0000000 --- a/Assignment_1_and_2/Assignment_1_Team_5.ipynb +++ /dev/null @@ -1,197 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Assignment_1_Team_5.ipynb", - "provenance": [], - "collapsed_sections": [], - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "b-YNPsO-GEbe" - }, - "source": [ - "def add_word(trie,word):\n", - " search=trie\n", - " for i in word:\n", - " if i in search:\n", - " pass\n", - " else:\n", - " search.update({i:{}})\n", - " search=search[i]" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "CL4l7eJo-rrd" - }, - "source": [ - "def in_trie(trie,word):\n", - " search=trie\n", - " ans=True\n", - " for i in word:\n", - " if i in search:\n", - " pass\n", - " else:\n", - " ans=False\n", - " break\n", - " search=search[i]\n", - " if(bool(search) and ans==True):\n", - " ans=False\n", - " return ans" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "DEFFZvxoLL2g" - }, - "source": [ - "def list_matches(trie,prefix):\n", - " li=[]\n", - " search=trie\n", - " flag=True\n", - " for i in prefix:\n", - " if i in list(search.keys()):\n", - " search=search[i]\n", - " else:\n", - " flag=False\n", - " break\n", - " if flag:\n", - " dfs(search,li,prefix)\n", - " return li\n" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "gQptkajip48W" - }, - "source": [ - "" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "2D1OtfcEPNJX" - }, - "source": [ - "def dfs(trie,li,s):\n", - " if not bool(trie):\n", - " li.append(s)\n", - " else:\n", - " for i in list(trie.keys()):\n", - " dfs(trie[i],li,s+i)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "kKEXc4mNENi_", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "241d2cd2-181f-4df3-d4be-9b856044ef2a" - }, - "source": [ - "def create_trie(li):\n", - " trie={}\n", - " for word in li:\n", - " add_word(trie,word)\n", - " return trie\n", - "li=['noice','cool','nonagon']\n", - "trie=create_trie(li)\n", - "print(trie)\n" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "{'n': {'o': {'i': {'c': {'e': {}}}, 'n': {'a': {'g': {'o': {'n': {}}}}}}}, 'c': {'o': 
{'o': {'l': {}}}}}\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 354 - }, - "id": "F0C9s2RqYvSa", - "outputId": "68ea3cfa-659c-4fc7-94e0-661282c6084d" - }, - "source": [ - "def main():\n", - " words=['bed','bid','bit','step','set']\n", - " trie=create_trie(words)\n", - " print(in_trie(trie,'bid'))\n", - " print(in_trie(trie,'bidi'))\n", - " print(list_matches(trie,'bi'))\n", - "\n", - "main()" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "True\n", - "False\n" - ], - "name": "stdout" - }, - { - "output_type": "error", - "ename": "NameError", - "evalue": "ignored", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 6\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist_matches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrie\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'bi'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 8\u001b[0;31m \u001b[0mmain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m", - "\u001b[0;32m\u001b[0m in \u001b[0;36mmain\u001b[0;34m()\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0min_trie\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrie\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'bid'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 5\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0min_trie\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrie\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'bidi'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 6\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlist_matches\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtrie\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m'bi'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 7\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 8\u001b[0m \u001b[0mmain\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m\u001b[0m in \u001b[0;36mlist_matches\u001b[0;34m(trie, prefix)\u001b[0m\n\u001b[1;32m 10\u001b[0m \u001b[0;32mbreak\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mflag\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 12\u001b[0;31m \u001b[0mdfs\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msearch\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mli\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0mprefix\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 13\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mli\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mNameError\u001b[0m: name 'dfs' is not defined" - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment_1_and_2/Assignment_1_and_2_(WEEK_1)_.ipynb 
b/Assignment_1_and_2/Assignment_1_and_2_(WEEK_1)_.ipynb deleted file mode 100644 index 078acca..0000000 --- a/Assignment_1_and_2/Assignment_1_and_2_(WEEK_1)_.ipynb +++ /dev/null @@ -1,1022 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Assignment 1 and 2 (WEEK 1) .ipynb", - "provenance": [], - "collapsed_sections": [], - "authorship_tag": "ABX9TyPhYxkQafJllXmUsYClu1Xi", - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "IINU0vEZBnUB" - }, - "source": [ - "" - ], - "execution_count": 4, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "pHYkrb4MB1mY" - }, - "source": [ - "# ***WEEK 1 SUBMISSION***\n", - "\n", - "***Team 3***\n", - "\n", - "*Team Members*\n", - "\n", - "* *Shreyasi Mandal*\n", - "* *Anushka Panda*\n", - "* *Lakshmi Pravallika*\n", - "\n", - "\n", - "*Assignment 1* *(HW1 to be completed)*\n", - "\n", - "```\n", - "https://www.cis.upenn.edu/~cis192/tliu/\n", - "```\n", - "*Assignment 2* *(Problem Set 2 to be completed)*\n", - "\n", - "\n", - "\n", - "```\n", - "https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/\n", - "```\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - "\n", - " " - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "9Y51Z1eJDtoo" - }, - "source": [ - "***ASSIGNMENT 1***\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "JoLsyoOFD0qf", - "outputId": "e374ee3d-8c31-45c5-a91d-ddc0270ec90e" - }, - "source": [ - "def add_word(trie, word):\n", - "\n", - " # Add a word to the given trie.\n", - "\n", - " # Args:\n", - " # trie (dict): the dictionary representation of a trie\n", - " # word (str): the word to be added\n", - "\n", - " # Returns:\n", - " # None\n", - "\n", - " # Side effect:\n", - " # trie is modified with word included\n", - " for c in word:\n", - " if c in trie:\n", - " trie = trie[c]\n", - " else:\n", - " trie = trie.setdefault(c, {})\n", - "\n", - "def create_trie(word_list):\n", - "\n", - " # Creates a trie from the given word list.\n", - "\n", - " # Args:\n", - " # word_list (list): list of words (str)\n", - "\n", - " #Returns:\n", - " # dict: a dictionary representation of the trie\n", - " trie = dict()\n", - " for word in word_list:\n", - " add_word(trie, word)\n", - " return trie\n", - "\n", - "def in_trie(trie , word):\n", - "\n", - " # Check whether the given word is present within the trie.\n", - "\n", - " # Args:\n", - " # word (str): the word to check\n", - " # trie (dict): the trie to check against\n", - "\n", - " # Returns:\n", - " # bool: True if the word is in the trie, False if it is not\n", - "\n", - "\n", - " for c in word:\n", - " if c in trie:\n", - " trie = trie[c]\n", - " else:\n", - " return False\n", - " return True\n", - "\n", - "words_list = list() \n", - "\n", - "def return_trie(trie, word):\n", - " for c in word:\n", - " if c in trie:\n", - " trie = trie[c]\n", - " return trie\n", - " \n", - " \n", - "def list_matches(trie, prefix):\n", - "\n", - " # List all word with the given prefix in the trie.\n", - " # If no words in 
the trie match the given prefix, return an empty list.\n", - "\n", - " \n", - " # Args:\n", - " # prefix (str): the prefix to match against\n", - " # trie (dict): the trie to search over\n", - "\n", - " # Returns:\n", - " # list: all words in the trie that begin with prefix \n", - "\n", - "\n", - " if trie == {}:\n", - " words_list.append(prefix)\n", - " return \n", - " for c in trie:\n", - " list_matches(trie[c], prefix+c)\n", - "\n", - "\n", - "def main():\n", - "\n", - " word_list = ['Anushka', 'Panda', 'Pavilion', 'Pavement']\n", - " trie1 = create_trie(word_list)\n", - " print(trie1)\n", - " print(in_trie(trie1, 'Panda'))\n", - " print(in_trie(trie1, 'Payment'))\n", - " trie2 = return_trie(trie1, 'Pav')\n", - " list_matches(trie2, 'Pav')\n", - " print(words_list)\n", - "\n", - " word_list1 = ['Shreyasi', 'Mandal', 'Shrban', 'Silence']\n", - " my_trie = create_trie(word_list1)\n", - " print(my_trie)\n", - " print(in_trie(my_trie, 'Shreyasi'))\n", - " print(in_trie(my_trie, 'Shrei'))\n", - " print(list_matches(my_trie, 'Shr'))\n", - "\n", - " \n", - "if __name__ == '__main__':\n", - " main()" - ], - "execution_count": 10, - "outputs": [ - { - "output_type": "stream", - "text": [ - "{'A': {'n': {'u': {'s': {'h': {'k': {'a': {}}}}}}}, 'P': {'a': {'n': {'d': {'a': {}}}, 'v': {'i': {'l': {'i': {'o': {'n': {}}}}}, 'e': {'m': {'e': {'n': {'t': {}}}}}}}}}\n", - "True\n", - "False\n", - "['Pavilion', 'Pavement']\n", - "{'S': {'h': {'r': {'e': {'y': {'a': {'s': {'i': {}}}}}, 'b': {'a': {'n': {}}}}}, 'i': {'l': {'e': {'n': {'c': {'e': {}}}}}}}, 'M': {'a': {'n': {'d': {'a': {'l': {}}}}}}}\n", - "True\n", - "False\n", - "None\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "aLxgeomXEk15" - }, - "source": [ - "***ASSIGNMENT 2***\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "7_kkXQzdIxOn", - "outputId": "2d88da4c-d776-457a-80f0-15bd034e4bca" - }, - "source": [ - "!wget https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "!unzip p*" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2021-04-19 17:17:40-- https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "Resolving ocw.mit.edu (ocw.mit.edu)... 151.101.2.133, 151.101.66.133, 151.101.130.133, ...\n", - "Connecting to ocw.mit.edu (ocw.mit.edu)|151.101.2.133|:443... connected.\n", - "HTTP request sent, awaiting response... 
200 OK\n", - "Length: 458195 (447K) [application/zip]\n", - "Saving to: ‘ps2.zip’\n", - "\n", - "\rps2.zip 0%[ ] 0 --.-KB/s \rps2.zip 100%[===================>] 447.46K --.-KB/s in 0.007s \n", - "\n", - "2021-04-19 17:17:40 (63.1 MB/s) - ‘ps2.zip’ saved [458195/458195]\n", - "\n", - "Archive: ps2.zip\n", - " inflating: ps2/hangman.py \n", - " inflating: ps2/MIT6_0001F16_Pset2.pdf \n", - " inflating: ps2/words.txt \n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "mjUQtdsUEoUa", - "outputId": "595b4bfd-1b37-4863-ab68-9d5086ebb40a" - }, - "source": [ - "# Problem Set 2, hangman.py\n", - "# Team: 3 \n", - "# Members : Shreyasi Mandal, Anushka Panda, Lakshmi Pravallika\n", - "\n", - "\n", - "# Hangman Game\n", - "# -----------------------------------\n", - "\n", - "import random\n", - "import string\n", - "\n", - "WORDLIST_FILENAME = \"ps2/words.txt\"\n", - "\n", - "\n", - "def load_words():\n", - " \"\"\"\n", - " Returns a list of valid words. Words are strings of lowercase letters.\n", - " \n", - " Depending on the size of the word list, this function may\n", - " take a while to finish.\n", - " \"\"\"\n", - " print(\"Loading word list from file...\")\n", - " # inFile: file\n", - " inFile = open(WORDLIST_FILENAME, 'r')\n", - " # line: string\n", - " line = inFile.readline()\n", - " # wordlist: list of strings\n", - " wordlist = line.split()\n", - " print(\" \", len(wordlist), \"words loaded.\")\n", - " return wordlist\n", - "\n", - "\n", - "\n", - "def choose_word(wordlist):\n", - " \"\"\"\n", - " wordlist (list): list of words (strings)\n", - " \n", - " Returns a word from wordlist at random\n", - " \"\"\"\n", - " return random.choice(wordlist)\n", - "\n", - "# end of helper code\n", - "\n", - "# -----------------------------------\n", - "\n", - "# Load the list of words into the variable wordlist\n", - "# so that it can be accessed from anywhere in the program\n", - "wordlist = load_words()\n", - "\n", - "\n", - "def is_word_guessed(secret_word, letters_guessed):\n", - " '''\n", - " secret_word: string, the word the user is guessing; assumes all letters are\n", - " lowercase\n", - " letters_guessed: list (of letters), which letters have been guessed so far;\n", - " assumes that all letters are lowercase\n", - " returns: boolean, True if all the letters of secret_word are in letters_guessed;\n", - " False otherwise\n", - " '''\n", - " for i in range(len(secret_word)):\n", - " if secret_word[i] not in letters_guessed:\n", - " return False\n", - " return True\n", - "\n", - "\n", - "def get_index_positions(list_of_elems, element):\n", - " ''' Returns the indexes of all occurrences of give element in\n", - " the list- listOfElements '''\n", - " index_pos_list = []\n", - " index_pos = 0\n", - " while True:\n", - " try:\n", - " # Search for item in list from indexPos to the end of list\n", - " index_pos = list_of_elems.index(element, index_pos)\n", - " # Add the index position in list\n", - " index_pos_list.append(index_pos)\n", - " index_pos += 1\n", - " except ValueError as e:\n", - " break\n", - " return index_pos_list\n", - "\n", - "\n", - "def get_guessed_word(secret_word, letters_guessed):\n", - " '''\n", - " secret_word: string, the word the user is guessing\n", - " letters_guessed: list (of letters), which letters have been guessed so far\n", - " returns: string, comprised of letters, underscores (_), and spaces that represents\n", - " which letters in secret_word have been guessed so 
far.\n", - " '''\n", - " result=list()\n", - " for i in range(len(secret_word)):\n", - " result.append(\"_ \")\n", - " for c in letters_guessed:\n", - " if c in secret_word:\n", - " i=get_index_positions(secret_word, c)\n", - " for j in range(len(i)):\n", - " result[i[j]]=c\n", - " result1=\"\"\n", - " for ele in result:\n", - " result1+=ele\n", - "\n", - " return result1\n", - "\n", - "\n", - "def get_available_letters(letters_guessed):\n", - " '''\n", - " letters_guessed: list (of letters), which letters have been guessed so far\n", - " returns: string (of letters), comprised of letters that represents which letters have not\n", - " yet been guessed.\n", - " '''\n", - " result=string.ascii_lowercase\n", - " for c in letters_guessed:\n", - " result=result.replace(c,'')\n", - "\n", - " return result\n", - " \n", - " \n", - "\n", - "def hangman(secret_word):\n", - " '''\n", - " secret_word: string, the secret word to guess.\n", - " \n", - " Starts up an interactive game of Hangman.\n", - " \n", - " * At the start of the game, let the user know how many \n", - " letters the secret_word contains and how many guesses s/he starts with.\n", - " \n", - " * The user should start with 6 guesses\n", - "\n", - " * Before each round, you should display to the user how many guesses\n", - " s/he has left and the letters that the user has not yet guessed.\n", - " \n", - " * Ask the user to supply one guess per round. Remember to make\n", - " sure that the user puts in a letter!\n", - " \n", - " * The user should receive feedback immediately after each guess \n", - " about whether their guess appears in the computer's word.\n", - "\n", - " * After each guess, you should display to the user the \n", - " partially guessed word so far.\n", - " \n", - " Follows the other limitations detailed in the problem write-up.\n", - " '''\n", - " print(\"Welcome to the game Hangman!\")\n", - " print(\"I am thinking of a word that is \",len(secret_word),\" letters long.\")\n", - " warnings_left=3\n", - " print(\"You have 3 warnings left.\")\n", - " letter=list()\n", - "\n", - " unique=len(set(secret_word))\n", - " guesses_left=6\n", - " while guesses_left>0:\n", - " print(\"-----------------\")\n", - " print(\"You have \",(guesses_left),\" guesses left\")\n", - " print(\"Available letters: \"+ get_available_letters(letter))\n", - " print(\"Please guess a letter: \")\n", - " st=input()\n", - " \n", - "\n", - " if st in letter:\n", - " if warnings_left>0:\n", - " print(\"Oops! You've already guessed that letter. You have \",(warnings_left-1),\" warnings left: \"+get_guessed_word(secret_word, letter))\n", - " warnings_left-=1\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You have no warnings left so you lose one guess: \"+get_guessed_word(secret_word, letter))\n", - " guesses_left-=1\n", - " else: \n", - " if str.isalpha(st):\n", - " a=str.lower(st)\n", - " letter.append(a)\n", - " if a in secret_word:\n", - " print(\"Good guess: \"+get_guessed_word(secret_word, letter))\n", - " \n", - " else:\n", - " print(\"Oops! That letter is not in my word: \"+get_guessed_word(secret_word, letter))\n", - " if (a=='a')|(a=='e')|(a=='i')|(a=='o')|(a=='u'):\n", - " guesses_left-=2\n", - " else:\n", - " guesses_left-=1\n", - " \n", - " else:\n", - " if warnings_left>0:\n", - " print(\"Oops! That is not a valid letter. You have \",(warnings_left-1),\" warnings left: \"+get_guessed_word(secret_word, letter))\n", - " warnings_left-=1\n", - " else:\n", - " print(\"Oops! That is not a valid letter. 
You have no warnings left so you lose one guess: \"+get_guessed_word(secret_word, letter))\n", - " guesses_left-=1\n", - "\n", - " \n", - " if is_word_guessed(secret_word, letter):\n", - " print(\"-----------------\")\n", - " print(\"Congratulations, you won!\")\n", - " print(\"Your total score for this game is: \",((guesses_left)*unique))\n", - " break;\n", - "\n", - " if (guesses_left)==0:\n", - " print(\"-----------------\")\n", - " print(\"Sorry, you ran out of guesses. The word was \"+secret_word)\n", - "\n", - " \n", - "\n", - "# When you've completed your hangman function, scroll down to the bottom\n", - "# of the file and uncomment the first two lines to test\n", - "#(hint: you might want to pick your own\n", - "# secret_word while you're doing your own testing)\n", - "\n", - "\n", - "# -----------------------------------\n", - "\n", - "\n", - "\n", - "def match_with_gaps(my_word, other_word):\n", - " '''\n", - " my_word: string with _ characters, current guess of secret word\n", - " other_word: string, regular English word\n", - " returns: boolean, True if all the actual letters of my_word match the \n", - " corresponding letters of other_word, or the letter is the special symbol\n", - " _ , and my_word and other_word are of the same length;\n", - " False otherwise: \n", - " '''\n", - " s=my_word\n", - " for x in range(len(my_word)):\n", - " if my_word[x]==' ':\n", - " s=s.replace(' ','')\n", - " \n", - " if len(s)!=len(other_word):\n", - " return False\n", - " else:\n", - " for c in s:\n", - " if c!='_':\n", - " i=get_index_positions(s,c)\n", - " j=get_index_positions(other_word,c)\n", - " if len(i)!=len(j): \n", - " return False\n", - " else:\n", - " for a in range(len(i)):\n", - " if i[a]!=j[a]:\n", - " return False\n", - " \n", - " return True\n", - " \n", - " \n", - "\n", - "\n", - "\n", - "def show_possible_matches(my_word):\n", - " '''\n", - " my_word: string with _ characters, current guess of secret word\n", - " returns: nothing, but should print out every word in wordlist that matches my_word\n", - " Keep in mind that in hangman when a letter is guessed, all the positions\n", - " at which that letter occurs in the secret word are revealed.\n", - " Therefore, the hidden letter(_ ) cannot be one of the letters in the word\n", - " that has already been revealed.\n", - "\n", - " '''\n", - " result=\"\"\n", - " for i in range(len(wordlist)):\n", - " if match_with_gaps(my_word, wordlist[i]):\n", - " result+=(wordlist[i]+\" \")\n", - " \n", - " if len(result)==0:\n", - " print(\"No Matches Found\")\n", - " else:\n", - " print(\"Possible word matches are: \")\n", - " print(result)\n", - " \n", - "\n", - "\n", - "\n", - "def hangman_with_hints(secret_word):\n", - " '''\n", - " secret_word: string, the secret word to guess.\n", - " \n", - " Starts up an interactive game of Hangman.\n", - " \n", - " * At the start of the game, let the user know how many \n", - " letters the secret_word contains and how many guesses s/he starts with.\n", - " \n", - " * The user should start with 6 guesses\n", - " \n", - " * Before each round, you should display to the user how many guesses\n", - " s/he has left and the letters that the user has not yet guessed.\n", - " \n", - " * Ask the user to supply one guess per round. 
Make sure to check that the user guesses a letter\n", - " \n", - " * The user should receive feedback immediately after each guess \n", - " about whether their guess appears in the computer's word.\n", - "\n", - " * After each guess, you should display to the user the \n", - " partially guessed word so far.\n", - " \n", - " * If the guess is the symbol *, print out all words in wordlist that\n", - " matches the current guessed word. \n", - " \n", - " Follows the other limitations detailed in the problem write-up.\n", - " '''\n", - " print(\"Welcome to the game Hangman!\")\n", - " print(\"I am thinking of a word that is \",len(secret_word),\" letters long.\")\n", - " warnings_left=3\n", - " print(\"You have 3 warnings left.\")\n", - " letter=list()\n", - "\n", - " unique=len(set(secret_word))\n", - " guesses_left=6\n", - " while guesses_left>0:\n", - " print(\"-----------------\")\n", - " print(\"You have \",(guesses_left),\" guesses left\")\n", - " print(\"Available letters: \"+ get_available_letters(letter))\n", - " print(\"Please guess a letter: \")\n", - " st=input()\n", - " \n", - "\n", - " if st in letter:\n", - " if warnings_left>0:\n", - " print(\"Oops! You've already guessed that letter. You have \",(warnings_left-1),\" warnings left: \"+get_guessed_word(secret_word, letter))\n", - " warnings_left-=1\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You have no warnings left so you lose one guess: \"+get_guessed_word(secret_word, letter))\n", - " guesses_left-=1\n", - " else: \n", - " if str.isalpha(st):\n", - " a=str.lower(st)\n", - " letter.append(a)\n", - " if a in secret_word:\n", - " print(\"Good guess: \"+get_guessed_word(secret_word, letter))\n", - " \n", - " else:\n", - " print(\"Oops! That letter is not in my word: \"+get_guessed_word(secret_word, letter))\n", - " if (a=='a')|(a=='e')|(a=='i')|(a=='o')|(a=='u'):\n", - " guesses_left-=2\n", - " else:\n", - " guesses_left-=1\n", - " elif st=='*':\n", - " show_possible_matches(get_guessed_word(secret_word, letter))\n", - " else:\n", - " if warnings_left>0:\n", - " print(\"Oops! That is not a valid letter. You have \",(warnings_left-1),\" warnings left: \"+get_guessed_word(secret_word, letter))\n", - " warnings_left-=1\n", - " else:\n", - " print(\"Oops! That is not a valid letter. You have no warnings left so you lose one guess: \"+get_guessed_word(secret_word, letter))\n", - " guesses_left-=1\n", - "\n", - " \n", - " if is_word_guessed(secret_word, letter):\n", - " print(\"-----------------\")\n", - " print(\"Congratulations, you won!\")\n", - " print(\"Your total score for this game is: \",((guesses_left)*unique))\n", - " break;\n", - "\n", - " if (guesses_left)==0:\n", - " print(\"-----------------\")\n", - " print(\"Sorry, you ran out of guesses. 
The word was \"+secret_word)\n", - "\n", - "\n", - "\n", - "# When you've completed your hangman_with_hint function, comment the two similar\n", - "# lines above that were used to run the hangman function, and then uncomment\n", - "# these two lines and run this file to test!\n", - "# Hint: You might want to pick your own secret_word while you're testing.\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " # pass\n", - "\n", - " # To test part 2, comment out the pass line above and\n", - " # uncomment the following two lines.\n", - " \n", - " #secret_word = choose_word(wordlist)\n", - " #hangman(secret_word)\n", - "\n", - "###############\n", - " \n", - " # To test part 3 re-comment out the above lines and \n", - " # uncomment the following two lines. \n", - " secret_word = choose_word(wordlist)\n", - " hangman_with_hints(\"tact\")\n", - " hangman_with_hints(\"apple\")\n", - " hangman_with_hints(\"else\")\n", - " hangman_with_hints(\"shreyasi\")\n", - " hangman_with_hints(secret_word)\n" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading word list from file...\n", - " 55900 words loaded.\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 4 letters long.\n", - "You have 3 warnings left.\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "a\n", - "Good guess: _ a_ _ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "a\n", - "Oops! You've already guessed that letter. You have 2 warnings left: _ a_ _ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "s\n", - "Oops! That letter is not in my word: _ a_ _ \n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcdefghijklmnopqrtuvwxyz\n", - "Please guess a letter: \n", - "$\n", - "Oops! That is not a valid letter. You have 1 warnings left: _ a_ _ \n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcdefghijklmnopqrtuvwxyz\n", - "Please guess a letter: \n", - "t\n", - "Good guess: ta_ t\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcdefghijklmnopqruvwxyz\n", - "Please guess a letter: \n", - "e\n", - "Oops! That letter is not in my word: ta_ t\n", - "-----------------\n", - "You have 3 guesses left\n", - "Available letters: bcdfghijklmnopqruvwxyz\n", - "Please guess a letter: \n", - "e\n", - "Oops! You've already guessed that letter. You have 0 warnings left: ta_ t\n", - "-----------------\n", - "You have 3 guesses left\n", - "Available letters: bcdfghijklmnopqruvwxyz\n", - "Please guess a letter: \n", - "e\n", - "Oops! You've already guessed that letter. 
You have no warnings left so you lose one guess: ta_ t\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: bcdfghijklmnopqruvwxyz\n", - "Please guess a letter: \n", - "c\n", - "Good guess: tact\n", - "-----------------\n", - "Congratulations, you won!\n", - "Your total score for this game is: 6\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 5 letters long.\n", - "You have 3 warnings left.\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "a\n", - "Good guess: a_ _ _ _ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "l\n", - "Good guess: a_ _ l_ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijkmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "*\n", - "Possible word matches are: \n", - "addle adult agile aisle amble ample amply amyls angle ankle apple apply aptly arils atilt \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijkmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "p\n", - "Good guess: appl_ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "p\n", - "Oops! You've already guessed that letter. You have 2 warnings left: appl_ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "e\n", - "Good guess: apple\n", - "-----------------\n", - "Congratulations, you won!\n", - "Your total score for this game is: 24\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 4 letters long.\n", - "You have 3 warnings left.\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "a\n", - "Oops! That letter is not in my word: _ _ _ _ \n", - "-----------------\n", - "You have 4 guesses left\n", - "Available letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "b\n", - "Oops! That letter is not in my word: _ _ _ _ \n", - "-----------------\n", - "You have 3 guesses left\n", - "Available letters: cdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "c\n", - "Oops! That letter is not in my word: _ _ _ _ \n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "2\n", - "Oops! That is not a valid letter. You have 2 warnings left: _ _ _ _ \n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "d\n", - "Oops! That letter is not in my word: _ _ _ _ \n", - "-----------------\n", - "You have 1 guesses left\n", - "Available letters: efghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "e\n", - "Good guess: e_ _ e\n", - "-----------------\n", - "You have 1 guesses left\n", - "Available letters: fghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "f\n", - "Oops! That letter is not in my word: e_ _ e\n", - "-----------------\n", - "Sorry, you ran out of guesses. 
The word was else\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 8 letters long.\n", - "You have 3 warnings left.\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "s\n", - "Good guess: _ _ _ _ _ _ s_ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrtuvwxyz\n", - "Please guess a letter: \n", - "i\n", - "Good guess: _ _ _ _ _ _ si\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghjklmnopqrtuvwxyz\n", - "Please guess a letter: \n", - "d\n", - "Oops! That letter is not in my word: _ _ _ _ _ _ si\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: abcefghjklmnopqrtuvwxyz\n", - "Please guess a letter: \n", - "y\n", - "Good guess: _ _ _ _ y_ si\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: abcefghjklmnopqrtuvwxz\n", - "Please guess a letter: \n", - "a\n", - "Good guess: _ _ _ _ yasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcefghjklmnopqrtuvwxz\n", - "Please guess a letter: \n", - "r\n", - "Good guess: _ _ r_ yasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcefghjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "e\n", - "Good guess: _ _ reyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfghjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "e\n", - "Oops! You've already guessed that letter. You have 2 warnings left: _ _ reyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfghjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "h\n", - "Good guess: _ hreyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfgjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "S\n", - "Good guess: _ hreyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfgjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "e\n", - "Oops! You've already guessed that letter. You have 1 warnings left: _ hreyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfgjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "r\n", - "Oops! You've already guessed that letter. You have 0 warnings left: _ hreyasi\n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcfgjklmnopqtuvwxz\n", - "Please guess a letter: \n", - "t\n", - "Oops! That letter is not in my word: _ hreyasi\n", - "-----------------\n", - "You have 4 guesses left\n", - "Available letters: bcfgjklmnopquvwxz\n", - "Please guess a letter: \n", - "o\n", - "Oops! That letter is not in my word: _ hreyasi\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: bcfgjklmnpquvwxz\n", - "Please guess a letter: \n", - "o\n", - "Oops! You've already guessed that letter. You have no warnings left so you lose one guess: _ hreyasi\n", - "-----------------\n", - "You have 1 guesses left\n", - "Available letters: bcfgjklmnpquvwxz\n", - "Please guess a letter: \n", - "o\n", - "Oops! You've already guessed that letter. You have no warnings left so you lose one guess: _ hreyasi\n", - "-----------------\n", - "Sorry, you ran out of guesses. 
The word was Shreyasi\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 8 letters long.\n", - "You have 3 warnings left.\n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "a\n", - "Good guess: _ a_ _ _ _ _ _ \n", - "-----------------\n", - "You have 6 guesses left\n", - "Available letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: \n", - "p\n", - "Oops! That letter is not in my word: _ a_ _ _ _ _ _ \n", - "-----------------\n", - "You have 5 guesses left\n", - "Available letters: bcdefghijklmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "l\n", - "Oops! That letter is not in my word: _ a_ _ _ _ _ _ \n", - "-----------------\n", - "You have 4 guesses left\n", - "Available letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "*\n", - "Possible word matches are: \n", - "babbling bachelor bacillus backings backbite backbone backdrop backfire backlogs backrest backside backslid backspin backstop badgered badmouth baffling baggiest bagpiper bagpipes baguette bailiffs bailouts bakelite bakeries baldness baldrics balinese balkiest balloons balloted ballroom ballyhoo balmiest baluster bandying banditry banished banishes banister banjoist bankbook bankroll bankrupt bannered banquets banshees bantered baptists baptisms baptized baptizes barbered barbecue barbells barberry bareness barefoot bargello baritone barkings baronies baroness baronets barouche barreled barrette barriers barrooms basement baseborn baseless basenjis basilisk basketry bassinet bassoons basswood bastions bastings bastille batching bathetic bathoses bathrobe bathroom bathtubs batistes battiest battings battened battered battling bauxites bawdiest bayberry bayonets cabinets cabochon caboodle cabooses cackling caddying caddises cadences cadenced cadmiums caduceus caffeine caginess caissons caitiffs cajoling cajolery calcined calcines calcites calcitic calciums calculus caldrons calfskin calibers calicoes calipers callings calliope callused calluses calmness calomels calories calorics calumets calypsos cambered cambiums cambrics cameroon camisole camomile campfire camphors campsite campuses canceled candying candidly candling canister cankered canniest canoeing canoeist canonize canopied canopies cantered canteens canticle capering capeskin capitols caprices capriole capsized capsizes capstone capsules capsuled captions captious captives captured captures capuchin carbides carbines carbolic carbonic carbonyl carboxyl careered careened carefree careless caressed caresses careworn carillon carmines caroling carolers caroming carotene carotids caroused carouses carousel carpeted carports carrions carrying carriers cartoned cartoons carvings casement casework cashiers cashmere cassette cassocks castings castling castoffs casuists catbirds catching catchers catchier category catering caterers catheter cathodes catholic cationic cattiest caucused caucuses cauldron caulking caustics cautious cautions caviling cavilers cavities cavorted cayennes dabbling dabblers dactylic daffiest daffodil daintily daintier dainties daiquiri dairying dairymen dallying dampness dampened dandling dandruff dangling dankness dappling daringly darkness darkened darkroom darlings dartling dashikis dateline daughter daunting dauphins dawdling daybooks daylight daytimes dazzling eagerest eardrums earliest earldoms earlobes earmuffs earnings earnests earphone earrings earshots earthier easement easiness easterly 
eateries fabulist fabulous faceless faceting facility factions factious factored factotum faddists fagoting faiences failings failures fainting faintest fairness faithful fakeries falconed falconer falconry fallibly fallible fallowed falsetto faltered families famished famously fancying fanciers fanciest fanciful fanlight farceurs farewell farrowed farthing farthest fascists fascisms fascicle fashions fastness fastened fastener fathered fatherly fathomed fatigued fatigues fattiest fattened faulting faultier fauvisms favoring favorite gabbiest gabbling gabfests gadflies gadgetry gaggling gaieties galleons galloped galoshes gambling gamblers gamboled gamecock gamester gammoned gangling ganglion ganglier gangrene gangster gantlets gantries garbling gardened gardener gargling gargoyle garlicky garments garnered garrison garroted garrotes gartered gaslight gasoline gassiest gassings gasworks gatefold gatepost gathered gatherer gaudiest gauntest gauntlet gauziest gaveling gavottes gawkiest gayeties gazelles gazetted gazettes habiting habitues habitude hackling hackneys haggling hagglers hairiest haircuts hairless hairline hairpins halberds haleness halftone hallooed hallowed halogens haltered hamilton hammered hammocks hampered hamsters handiest handbill handbook handcuff handfuls handguns handling handlers handouts handsome handwork handymen hangings hangdogs hangouts hangover hankered haploids happiest happened harbored hardiest hardness hardened hardener hardship hardtops hardwire hardwood harelips haricots harmless harmonic harpists harpoons harrying harriers harrowed harshest hartford harvests hassling hassocks hastiest hastened hatboxes hatching hatchery hatchets hauberks haunches haunting hautboys hauteurs hawkweed hawthorn hayforks haylofts hayseeds haywires hazelnut haziness jabbered jackboot jacketed jackpots jadeites jaggedly jailbird jalopies jalousie jamboree jangling janitors japeries jaundice jaunting jauntily jauntier javelins jawboned jawbones jazziest katydids labeling labelers laboring laborers laborite laburnum lacewing laciness lacquers lacrosse lactoses ladybird ladybugs ladylike ladylove ladyship laetrile lakeside lambency lambskin lamented lampoons lamppost lampreys lancelet lancelot landings landlord languish languors lankiest lanolins lanterns lapwings larcener larkspur larruped larynges larynxes lashings lassoing latching latchkey lateness lathered latitude latrines latterly latticed lattices laughing laughter launched launcher launches launders lavender lavished lavishes lavishly lawfully lawgiver lawsuits laxities layering layettes layovers laziness machetes machined machines machismo mackerel maddened madhouse madwomen maestros magnetic magnetos mahjongs maidenly mailings mainline maintops majestic majoring majority maldives maleness maligned malinger malmseys maltoses mammoths mandible mandolin mandrels mandrill maneuver manfully mangiest mangling mangrove manholes manhoods manhunts manicure manifest manifold manikins maniples manliest mannered mannerly manpower mansions mantises mantling manumits marbling marching marchers margined marigold mariners maritime markings markdown markedly marketed marketer marksmen marlines marmoset marooned marquees marquise marrying marshier martinet martinis martyred marveled marxists masseurs masseuse mastered masterly mastiffs mastitis mastodon mastoids matching matchbox materiel matinees matrices matrixes matronly mattings mattered mattocks mattress maturing maturity maunders maverick maximize maximums mayflies nacelles nacreous 
naivetes nameless nankeens naperies napoleon narcoses narcosis narcotic narrowed narrower narrowly nascence nastiest nativity nattiest nauseous nautilus oarlocks pabulums pacifist pacifism pacified pacifies pacifier packings paddling paddocks padlocks pageboys painless painting painters pairings paisleys paleness palettes palfreys palimony palinode palmiest palmists palmetto palomino paltered paltrier pampered pamphlet pandered pandemic pandowdy paneling panelist pangolin panicles panicked panniers panpipes panthers pantheon pantries pantsuit papering papooses parboils parceled parching pardoned parented paretics parishes parities parleyed parodied parodies paroling parolees parotids paroxysm parquets parrying parroted parsnips partings parterre particle partners parvenus passions passings passbook passkeys passover passport password pastiest pasterns pastiche pastille pastimes pastries pastured pastures patching patented patently patentee pathetic pathogen patience patients patriots patroons pattered patterns paunches paupered pavement pavilion pawnshop paycheck payments payrolls rabidity raccoons racquets radioing radishes radiuses raffling ragtimes ragweeds railings raillery raiments rainiest rainbows raindrop rainless rakishly rallying rambling ramblers ramekins ramified ramifies ranching ranchers ranchero randomly rangiest rankling ransomed rapidity rapidest rappings rapports raptures rarefied rarefies rarities rashness raspiest ratchets ratified ratifies rationed ratlines rattiest rattling rattlers raveling ravening ravenous ravished ravishes rawboned rawhides saboteur sacristy saddened saddling saddlers sadirons sadistic safeness safeties saffrons sageness sailfish sailorly sainting salesmen salience salients salinity sallying sallower saltiest saltines saluting sameness samisens samoyeds sampling samplers sanctity sanctify sanction sanctums sandiest sandlots sandwich saneness sanguine sanitize sanskrit sapience saplings sappiest sapphire sapwoods sardined sardines sardonic sassiest satchels satirist satirize sauciest saunters sauteing sauterne savoring savories savvying sawbones sawbucks sawhorse sawmills saxhorns tablehop tabletop tabloids tabooing taciturn tackiest tackling tacklers taconite tactless tadpoles tailored tailpipe tailspin tailwind tainting takedown takeoffs takeover talented talkiest tallness tallyhos tallying talmudic tambours tameness tampered tangiest tangelos tangents tangibly tangible tangling tangoing tantrums tapering tapestry tapeworm taprooms taproots tapsters tardiest targeted tarrying tartness tasseled tastiest tasteful tattered tattling tattlers tattooed taunting tautness tawdrier tawniest taxonomy vaccines vacuoles vacuumed valences valeting validity valkyrie valorous vamoosed vamooses vampires vandykes vanillin vanished vanishes vanities vanquish vapidity vaporing vaporous vaporize vaqueros varicose varmints vaseline vastness vaulting vaunting wackiest waddings waddling waffling wagering waggling wainscot waitress wakening walkouts walleyed walleyes walloons walloped wallowed walruses waltzing wandered wanderer wangling wantoned wantonly warbling warblers wardrobe wardroom wardship wariness warlocks warlords warnings warriors warships wartimes washings washbowl washouts washroom washtubs wastings wasteful wastrels watching watchers watchdog watchful watchmen watering waterier waterloo wattling wavering waviness waxiness waxwings waxworks waybills waysides yachting yarmulke \n", - "-----------------\n", - "You have 4 guesses left\n", - "Available 
letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "b\n", - "Oops! That letter is not in my word: _ a_ _ _ _ _ _ \n", - "-----------------\n", - "You have 3 guesses left\n", - "Available letters: cdefghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "c\n", - "Oops! That letter is not in my word: _ a_ _ _ _ _ _ \n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defghijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "g\n", - "Good guess: _ a_ g_ _ _ g\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "*\n", - "Possible word matches are: \n", - "dangling jangling laughing mangling tangling tangoing wangling \n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmnoqrstuvwxyz\n", - "Please guess a letter: \n", - "t\n", - "Good guess: ta_ g_ _ _ g\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmnoqrsuvwxyz\n", - "Please guess a letter: \n", - "n\n", - "Good guess: tang_ _ ng\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmoqrsuvwxyz\n", - "Please guess a letter: \n", - "l\n", - "Oops! You've already guessed that letter. You have 2 warnings left: tang_ _ ng\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmoqrsuvwxyz\n", - "Please guess a letter: \n", - "*\n", - "Possible word matches are: \n", - "tangling tangoing \n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhijkmoqrsuvwxyz\n", - "Please guess a letter: \n", - "i\n", - "Good guess: tang_ ing\n", - "-----------------\n", - "You have 2 guesses left\n", - "Available letters: defhjkmoqrsuvwxyz\n", - "Please guess a letter: \n", - "o\n", - "Good guess: tangoing\n", - "-----------------\n", - "Congratulations, you won!\n", - "Your total score for this game is: 12\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment_1_and_2/Assignment_2_Team_5.ipynb b/Assignment_1_and_2/Assignment_2_Team_5.ipynb deleted file mode 100644 index 3b9e585..0000000 --- a/Assignment_1_and_2/Assignment_2_Team_5.ipynb +++ /dev/null @@ -1,416 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Assignment_2_Team_5.ipynb", - "provenance": [], - "collapsed_sections": [], - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "FnOMU764epk1", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "4f90409a-ff40-49d8-ff9f-8b4d11c1ee96" - }, - "source": [ - "import random\n", - "import string\n", - "!wget https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "!mkdir file_destination\n", - "!unzip ps2.zip -d file_destination" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2021-04-17 17:47:54-- https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "Resolving 
ocw.mit.edu (ocw.mit.edu)... 151.101.2.133, 151.101.66.133, 151.101.130.133, ...\n", - "Connecting to ocw.mit.edu (ocw.mit.edu)|151.101.2.133|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 458195 (447K) [application/zip]\n", - "Saving to: ‘ps2.zip.3’\n", - "\n", - "ps2.zip.3 100%[===================>] 447.46K --.-KB/s in 0.04s \n", - "\n", - "2021-04-17 17:47:55 (10.2 MB/s) - ‘ps2.zip.3’ saved [458195/458195]\n", - "\n", - "Archive: ps2.zip\n", - " inflating: file_destination/ps2/hangman.py \n", - " inflating: file_destination/ps2/MIT6_0001F16_Pset2.pdf \n", - " inflating: file_destination/ps2/words.txt \n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "FkG_RD33nSYU" - }, - "source": [ - "\n", - "def load_words():\n", - " print(\"Loading word list from file...\")\n", - " # inFile: file\n", - " inFile = open(\"/content/file_destination/ps2/words.txt\", 'r')\n", - " # line: string\n", - " line = inFile.readline()\n", - " # wordlist: list of strings\n", - " wordlist = line.split()\n", - " print(\" \", len(wordlist), \"words loaded.\")\n", - " return wordlist\n", - "\n" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Z_Amtqdae3yS" - }, - "source": [ - "def choose_word(wordlist):\n", - " return random.choice(wordlist)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "E0eCjYCle84B" - }, - "source": [ - "def is_word_guessed(secret_word, letters_guessed):\n", - " for letter in secret_word:\n", - " if letter in letters_guessed:\n", - " pass\n", - " else:\n", - " return False\n", - " return True" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "QmzIJbcHfAK5" - }, - "source": [ - "def get_guessed_word(secret_word, letters_guessed):\n", - " string=''\n", - " for x in secret_word:\n", - " if x in letters_guessed:\n", - " string=string+x\n", - " else:\n", - " string=string+'_ '\n", - " return string" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "71Lf9CFcfD5V" - }, - "source": [ - "def get_availible_letters(letters_guessed):\n", - " mum='abcdefghijklmnopqrstuvwxyz'\n", - " str=''\n", - " for c in mum:\n", - " if c in letters_guessed:\n", - " pass\n", - " else:\n", - " str=str+c\n", - " return str" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "8RtPjLBzfHmh" - }, - "source": [ - "def hangman(secret_word):\n", - " # FILL IN YOUR CODE HERE AND DELETE \"pass\"\n", - " print(\"Welcome to the game Hangman!\")\n", - " print(\"I am thinking of a word that is \"+str(len(secret_word))+ \" letters long.\")\n", - " print(\"You have 3 warnings left.\")\n", - " print(\"-------------\")\n", - " mum='abcdefghijklmnopqrstuvwxyz'\n", - " warning=3\n", - " guesses=6\n", - " letters_guessed=[]\n", - " vowel=\"aeiou\"\n", - " count=0\n", - " while guesses>0:\n", - " print(\"You have \"+str(guesses)+\" guesses left.\")\n", - " print(\"Availible letters:\",end=\" \")\n", - " y=get_availible_letters(letters_guessed)\n", - " print(y)\n", - " print(\"Please guess a letter: \",end=\" \")\n", - " char=input()\n", - " if char not in mum:\n", - " warning-=1\n", - " if warning<-1:\n", - " warning=-1\n", - " if warning>=0:\n", - " print(\"Oops! That is not a valid letter. 
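A quick, hedged sketch of what the two display helpers defined in the deleted cell above return (standalone re-implementations for illustration only, with the spelling normalized, not the notebook's exact code): unguessed letters are rendered as an underscore plus a space, which is why the transcripts show words like `tang_ _ ng`, and the available-letter string simply shrinks as letters are used up.

```python
# Illustration only: standalone equivalents of get_guessed_word() and
# get_availible_letters() from the cell above.
import string

def get_guessed_word(secret_word, letters_guessed):
    # Reveal guessed letters; show "_ " (underscore plus space) for the rest.
    return "".join(c if c in letters_guessed else "_ " for c in secret_word)

def get_available_letters(letters_guessed):
    # The alphabet minus everything guessed so far.
    return "".join(c for c in string.ascii_lowercase if c not in letters_guessed)

print(get_guessed_word("tangoing", ["t", "a", "n", "g"]))   # tang_ _ ng
print(get_available_letters(["t", "a", "n", "g"]))          # bcdefhijklmopqrsuvwxyz
```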
You have \"+str(warning)+ \" warnings left: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " else:\n", - " print(\"Oops! That is not a valid letter. You have no warnings left so you lose one guess:\"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " guesses-=1\n", - " elif (char in secret_word) and (char not in letters_guessed):\n", - " letters_guessed.append(char)\n", - " count+=1\n", - " print(\"Good guess: \"+get_guessed_word(secret_word,letters_guessed))\n", - " elif (char in letters_guessed):\n", - " warning-=1\n", - " if warning<-1:\n", - " warning=-1\n", - " if warning>=0:\n", - " print(\"Oops! You've already guessed that letter. You have \"+str(warning)+ \" warnings left: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You have no warnings left so you lose one guess:\"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " guesses-=1\n", - " else:\n", - " letters_guessed.append(char)\n", - " print(\"Oops! That letter is not in my word: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " if char in vowel:\n", - " guesses-=2\n", - " else:\n", - " guesses-=1\n", - " print(\"-------------\")\n", - " if (get_guessed_word(secret_word,letters_guessed).strip() == secret_word.strip()):\n", - " break\n", - " if guesses<=0:\n", - " print(\"Sorry, you ran out of guesses. The word was \"+secret_word)\n", - " else:\n", - " print(\"Congratulations, you won!\");\n", - " print(\"Your total score for this game is: \"+str(guesses*count))" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "DBtWAqU-kVa1" - }, - "source": [ - "def match_with_gaps(my_word,other_word):\n", - " my_word=my_word.replace(\" \",\"\")\n", - " other_word=other_word.replace(\" \",\"\")\n", - " # print(my_word)\n", - " # print(other_word)\n", - " if(len(my_word)!=len(other_word)):\n", - " return False\n", - " for i in range(0,len(my_word)):\n", - " if((my_word[i]!='_') and (my_word[i]!=other_word[i])):\n", - " return False\n", - " if((my_word[i]=='_') and (other_word[i] in my_word)):\n", - " return False\n", - " return True\n", - "\n", - "# print(match_with_gaps(\"a _ _ o e\",\"apple\"))" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "1HHZlA_TkVnf" - }, - "source": [ - "def show_possible_matches(my_word):\n", - " count=0\n", - " for word in wordlist:\n", - " if(match_with_gaps(my_word,word)):\n", - " print(word,end=\" \")\n", - " count=1\n", - " if(count==0):\n", - " print(\"No matches found\",end=\" \")\n", - " print()" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "mBXKcBc0XdC8" - }, - "source": [ - "def hangman_with_hints(secret_word):\n", - " # FILL IN YOUR CODE HERE AND DELETE \"pass\"\n", - " print(\"Welcome to the game Hangman!\")\n", - " print(\"I am thinking of a word that is \"+str(len(secret_word))+ \" letters long.\")\n", - " print(\"You have 3 warnings left.\")\n", - " print(\"-------------\")\n", - " mum='abcdefghijklmnopqrstuvwxyz'\n", - " warning=3\n", - " guesses=6\n", - " letters_guessed=[]\n", - " vowel=\"aeiou\"\n", - " count=0\n", - " while guesses>0:\n", - " print(\"You have \"+str(guesses)+\" guesses left.\")\n", - " print(\"Availible letters:\",end=\" \")\n", - " y=get_availible_letters(letters_guessed)\n", - " print(y)\n", - " print(\"Please guess a letter: \",end=\" \")\n", - " char=input()\n", - " if char=='*':\n", - " 
print(\"Possible matches are:\")\n", - " show_possible_matches(get_guessed_word(secret_word,letters_guessed))\n", - " elif char not in mum:\n", - " warning-=1\n", - " if warning<-1:\n", - " warning=-1\n", - " if warning>=0:\n", - " print(\"Oops! That is not a valid letter. You have \"+str(warning)+ \" warnings left: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " else:\n", - " print(\"Oops! That is not a valid letter. You have no warnings left so you lose one guess:\"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " guesses-=1\n", - " elif (char in secret_word) and (char not in letters_guessed):\n", - " letters_guessed.append(char)\n", - " count+=1\n", - " print(\"Good guess: \"+get_guessed_word(secret_word,letters_guessed))\n", - " elif (char in letters_guessed):\n", - " warning-=1\n", - " if warning<-1:\n", - " warning=-1\n", - " if warning>=0:\n", - " print(\"Oops! You've already guessed that letter. You have \"+str(warning)+ \" warnings left: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You have no warnings left so you lose one guess:\"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " guesses-=1\n", - " else:\n", - " letters_guessed.append(char)\n", - " print(\"Oops! That letter is not in my word: \"+str(get_guessed_word(secret_word,letters_guessed)))\n", - " if char in vowel:\n", - " guesses-=2\n", - " else:\n", - " guesses-=1\n", - " print(\"-------------\")\n", - " if (get_guessed_word(secret_word,letters_guessed).strip() == secret_word.strip()):\n", - " break\n", - " if guesses<=0:\n", - " print(\"Sorry, you ran out of guesses. The word was \"+secret_word)\n", - " else:\n", - " print(\"Congratulations, you won!\");\n", - " print(\"Your total score for this game is: \"+str(guesses*count))" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "VcmxBMh4Xk5J", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "97bc7610-815f-427e-fdf2-236cbe8086d8" - }, - "source": [ - "wordlist = load_words()\n", - "secret_word = choose_word(wordlist)\n", - "hangman_with_hints(secret_word)" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading word list from file...\n", - " 55900 words loaded.\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 9 letters long.\n", - "You have 3 warnings left.\n", - "-------------\n", - "You have 6 guesses left.\n", - "Availible letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: a\n", - "Good guess: a_ _ _ a_ _ _ _ \n", - "-------------\n", - "You have 6 guesses left.\n", - "Availible letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: *\n", - "Possible matches are:\n", - "abeyances abidances abstained acclaimed acquaints actuality actualize actuaries actuating actuation actuators adoration adulating adulation adulators adulatory aerialist affiances affianced afflation ageratums agitating agitation agitators airmailed alliances alphabets ambiances amorality animation animalism animating animators annealing anomalies anomalous anteaters appealing appearing appeasing appeasers applauded appraised appraiser appraises archaisms archangel aromatics assuaging astraddle atonality attracted averaging avocation awkwarder awkwardly \n", - "-------------\n", - "You have 6 guesses left.\n", - "Availible letters: bcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: f\n", - "Oops! 
That letter is not in my word: a_ _ _ a_ _ _ _ \n", - "-------------\n", - "You have 5 guesses left.\n", - "Availible letters: bcdeghijklmnopqrstuvwxyz\n", - "Please guess a letter: g\n", - "Oops! That letter is not in my word: a_ _ _ a_ _ _ _ \n", - "-------------\n", - "You have 4 guesses left.\n", - "Availible letters: bcdehijklmnopqrstuvwxyz\n", - "Please guess a letter: h\n", - "Oops! That letter is not in my word: a_ _ _ a_ _ _ _ \n", - "-------------\n", - "You have 3 guesses left.\n", - "Availible letters: bcdeijklmnopqrstuvwxyz\n", - "Please guess a letter: i\n", - "Good guess: a_ i_ a_ _ _ _ \n", - "-------------\n", - "You have 3 guesses left.\n", - "Availible letters: bcdejklmnopqrstuvwxyz\n", - "Please guess a letter: j\n", - "Oops! That letter is not in my word: a_ i_ a_ _ _ _ \n", - "-------------\n", - "You have 2 guesses left.\n", - "Availible letters: bcdeklmnopqrstuvwxyz\n", - "Please guess a letter: k\n", - "Oops! That letter is not in my word: a_ i_ a_ _ _ _ \n", - "-------------\n", - "You have 1 guesses left.\n", - "Availible letters: bcdelmnopqrstuvwxyz\n", - "Please guess a letter: l\n", - "Oops! That letter is not in my word: a_ i_ a_ _ _ _ \n", - "-------------\n", - "Sorry, you ran out of guesses. The word was animators\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment_1_and_2/Team2_Wikipedia_simplifier.ipynb b/Assignment_1_and_2/Team2_Wikipedia_simplifier.ipynb deleted file mode 100644 index 4b979c2..0000000 --- a/Assignment_1_and_2/Team2_Wikipedia_simplifier.ipynb +++ /dev/null @@ -1,863 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Team2-Wikipedia-simplifier.ipynb", - "provenance": [], - "collapsed_sections": [], - "toc_visible": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "ZmfA7M1VtkxM" - }, - "source": [ - "# **Wikipidea Simplifier**\n", - "### *ACA 2021 Project*\n", - "\n", - "---\n", - "\n", - "> **Team 2**\n", - " 1. Akanksha Singh\n", - " 2. Harshit Raj\n", - " 3. Kunwar Preet Singh\n", - "\n", - "---\n", - "\n", - "> **Mentors**\n", - " * Ananya Gupta\n", - " * Rishabh Dugaye\n", - "\n", - "---\n", - "\n", - "April 12, 2021" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "wyTYGOnB1IDU" - }, - "source": [ - "# **General Instructions**\n", - "\n", - "* If you need more help with python, following links will help you:\n", - " * https://www.tutorialspoint.com/python/index.htm \n", - " * https://www.w3schools.com/python/\n", - "\n", - "* We want all of you to do the assignments on Colab and submit a Colab notebook. For this, you can go through the following links: \n", - " * https://www.coursera.org/lecture/getting-started-with-tensor-flow2/coding-tutorial-introduction-to-google-colab-qWInB\n", - " * https://www.youtube.com/watch?v=i-HnvsehuSw\n", - "\n", - "* Lastly, set up a Github account. If possible, go through this link to understand git: \n", - " * https://www.vogella.com/tutorials/Git/article.html. \n", - " * Next Sunday, that is, 18/04, we’ll conduct a git tutorial to help you through it. 
" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "_GTSrNaX0dm2" - }, - "source": [ - "# **Assignment 1**\n", - "\n", - "You’re supposed to the HW1 from this link:\n", - "> https://www.cis.upenn.edu/~cis192/tliu/ \n", - "\n", - "Due date is Thu, Apr 15 by 6 PM" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "JvEZivx729iC", - "outputId": "70ea3158-eb43-4036-8a93-44e5400c6877" - }, - "source": [ - "# Assing-1: Team 2\n", - "# Author : Aska\n", - "\n", - "import json\n", - "\n", - "trie = {\n", - " \"b\": {\n", - " \"e\":{\n", - " \"t\": {}\n", - " },\n", - " \"i\": {\n", - " \"d\": {},\n", - " \"t\": {}\n", - " }\n", - " },\n", - " \"s\": {\n", - " \"e\": {\n", - " \"t\": {}\n", - " },\n", - " \"t\": {\n", - " \"e\": {\n", - " \"p\": {}\n", - " }\n", - " }\n", - " }\n", - " }\n", - "curr=trie\n", - "word=[]\n", - "def print_words(trie,word):\n", - " # print(\"we entered this\")\n", - " if bool(trie)==0:\n", - " print(str(word))\n", - " else:\n", - " nodes=list(trie)\n", - " i=0\n", - " while i https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ \n", - "\n", - "Due date is Thu, Apr 18 by 6 PM" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "Ovaxht3fgAkj" - }, - "source": [ - "## **Hangman (game)**\n", - "\n", - "#### **Pre req**\n", - "\n", - "\n", - "###### **To run the latter code we need a file \"words.txt\"**\n", - "\n", - "For the same Run the prev code before i.e. \n", - "\n", - "```\n", - "!wget https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "!unzip p*\n", - " ```\n", - "Afterwards the next code should work just fine!\n", - "\n", - "---\n", - "\n", - "#### **How to play**\n", - "1. Choose would u like to take hints during game or not. \n", - " feed `y`if u want hints, `n` otherwise \n", - "\n", - "2. Then the system will tell you how many letter word u are about to guess.\n", - "\n", - "3. You start with 6 guesses \n", - " > loose one guess for each wrong consonant guessed \n", - " loose two guesses for each wrong vowel guessed \n", - "\n", - " Game ends when you run out of guesses\n", - "\n", - "4. [Only For Hint Mode] When u insert `*` u get hints to what the words might be\n", - "\n", - "5. When u insert a letter it accordingly\n", - "\n", - "6. Any invalid input will give u a warning \n", - " after three warning you start loosing guesses \n", - " Invalid input: any repeated letter or anything which is not letter. (`*` is allowed in hint mode)\n", - "\n", - "7. 
Score is calculated as : \n", - " > `Total score = guesses_remaining * number unique letters in secret_word`\n", - "\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "86ZP0_kU4gzd", - "colab": { - "base_uri": "https://localhost:8080/" - }, - "outputId": "bfacc7e4-72c4-4902-fd1c-3cd701847ac7" - }, - "source": [ - "!wget https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "!unzip p*\n" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "--2021-04-17 11:09:25-- https://ocw.mit.edu/courses/electrical-engineering-and-computer-science/6-0001-introduction-to-computer-science-and-programming-in-python-fall-2016/assignments/ps2.zip\n", - "Resolving ocw.mit.edu (ocw.mit.edu)... 151.101.2.133, 151.101.66.133, 151.101.130.133, ...\n", - "Connecting to ocw.mit.edu (ocw.mit.edu)|151.101.2.133|:443... connected.\n", - "HTTP request sent, awaiting response... 200 OK\n", - "Length: 458195 (447K) [application/zip]\n", - "Saving to: ‘ps2.zip.2’\n", - "\n", - "\rps2.zip.2 0%[ ] 0 --.-KB/s \rps2.zip.2 100%[===================>] 447.46K --.-KB/s in 0.01s \n", - "\n", - "2021-04-17 11:09:25 (45.3 MB/s) - ‘ps2.zip.2’ saved [458195/458195]\n", - "\n", - "Archive: ps2.zip\n", - "caution: filename not matched: ps2.zip\n", - "caution: filename not matched: ps2.zip.1\n", - "caution: filename not matched: ps2.zip.2\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "yveGw5IFdcuI", - "outputId": "df7ed66c-d9e1-45ae-f743-bac7757c244c" - }, - "source": [ - "# FIRST RUN PREV CODE AND GET \"words.txt\" :D\n", - "\n", - "# Author : Salazar.h\n", - "# File : hangman.py\n", - "# Path : ~/Documents/Wikipedia Simplifier/ps2\n", - "\n", - "# Hangman Game\n", - "\n", - "import random\n", - "import string\n", - "\n", - "WORDLIST_FILENAME = \"./ps2/words.txt\"\n", - "\n", - "\n", - "def load_words():\n", - " # Returns a list of valid words. 
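The scoring rule stated in the "How to play" cell above (`Total score = guesses_remaining * number unique letters in secret_word`) fits in a couple of lines. A hedged sketch, using `set()` to count unique letters, which is how the Team 1 notebook later in this diff reads the rule; some of the other notebooks count only the unique letters actually guessed, and the two readings coincide when the player wins, since every letter of the word has then been guessed.

```python
def total_score(guesses_remaining, secret_word):
    # Score as stated above: remaining guesses times unique letters in the word.
    return guesses_remaining * len(set(secret_word))

# The transcript earlier in this diff wins "tangoing" with 2 guesses left:
print(total_score(2, "tangoing"))   # 2 * 6 unique letters = 12, matching the printed score
```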
Words are strings of lowercase letters.\n", - "\n", - " print(\"Loading word list from file...\")\n", - "\n", - " # inFile: file\n", - " inFile = open(WORDLIST_FILENAME, 'r')\n", - "\n", - " # line: string\n", - " line = inFile.readline()\n", - "\n", - " # wordlist: list of strings\n", - " wordlist = line.split()\n", - "\n", - " print(\" \", len(wordlist), \"words loaded.\")\n", - " return wordlist\n", - "\n", - "\n", - "def choose_word(wordlist):\n", - "\n", - " # wordlist (list): list of words (strings)\n", - " # Returns a word from wordlist at random\n", - "\n", - " return random.choice(wordlist)\n", - "\n", - "\n", - "# Load the list of words into the variable wordlist\n", - "# so that it can be accessed from anywhere in the program\n", - "wordlist = load_words()\n", - "\n", - "\n", - "def is_word_guessed(secret_word, letters_guessed):\n", - " '''\n", - " secret_word: string, the word the user is guessing; assumes all letters are lowercase\n", - "\n", - " letters_guessed: list (of letters), which letters have been guessed so far; assumes that all letters are lowercase\n", - "\n", - " returns: boolean, True if all the letters of secret_word are in letters_guessed; False otherwise\n", - " '''\n", - " for letter in secret_word:\n", - " if letter not in letters_guessed:\n", - " return False\n", - " return True\n", - "\n", - "\n", - "def get_guessed_word(secret_word, letters_guessed):\n", - " '''\n", - " secret_word: string, the word the user is guessing\n", - "\n", - " letters_guessed: list (of letters), which letters have been guessed so far\n", - "\n", - " returns: string, comprised of letters, underscores (_), and spaces that represents \n", - " which letters in secret_word have been guessed so far.\n", - " '''\n", - " out = \"\"\n", - " for letter in secret_word:\n", - " if letter in letters_guessed:\n", - " out += letter\n", - " else:\n", - " out += \"_ \"\n", - "\n", - " return out\n", - "\n", - "\n", - "def get_available_letters(letters_guessed):\n", - " '''\n", - " letters_guessed: list (of letters), which letters have been guessed so far\n", - " returns: string (of letters), comprised of letters that represents which letters have not\n", - " yet been guessed.\n", - " '''\n", - " out = \"\"\n", - " a = string.ascii_lowercase\n", - " for letter in a:\n", - " if letter not in letters_guessed:\n", - " out += letter\n", - "\n", - " return out\n", - "\n", - "\n", - "vowels = \"aeiou\"\n", - "\n", - "\n", - "def hangman(secret_word):\n", - "\n", - " # Hangman Driver Code\n", - " '''\n", - " secret_word: string, the secret word to guess.\n", - "\n", - " Starts up an interactive game of Hangman.\n", - "\n", - " * At the start of the game, let the user know how many \n", - " letters the secret_word contains and how many guesses s/he starts with.\n", - "\n", - " * The user should start with 6 guesses\n", - "\n", - " * Before each round, you should display to the user how many guesses\n", - " s/he has left and the letters that the user has not yet guessed.\n", - "\n", - " * Ask the user to supply one guess per round. Make sure to check that the user guesses a letter\n", - "\n", - " * The user should receive feedback immediately after each guess \n", - " about whether their guess appears in the computer's word.\n", - "\n", - " * After each guess, you should display to the user the \n", - " partially guessed word so far.\n", - "\n", - " * If the guess is the symbol *, print out all words in wordlist that\n", - " matches the current guessed word. 
\n", - "\n", - " '''\n", - " print(\"Welcome to the game Hangman without Hints!\")\n", - " print(\"I am thinking of a word that is {} letters long.\".format(len(secret_word)))\n", - "\n", - " # warning left thoda mehnat kar lete hai\n", - "\n", - " warning_left = 2\n", - " letters_guessed = []\n", - " guessed_word = get_guessed_word(secret_word, letters_guessed)\n", - " i = -1\n", - "\n", - " # guesssing loop\n", - "\n", - " while i < 5 and not is_word_guessed(secret_word, letters_guessed):\n", - " i += 1\n", - " print(\"------------------\")\n", - "\n", - " # print number of guesses left\n", - " print(\"You have {} guess{} left.\".format(6-i, 'es' if i < 5 else ''))\n", - "\n", - " # letters available\n", - " available_letters = get_available_letters(letters_guessed)\n", - " print(\"Available letters: \" + available_letters)\n", - "\n", - " # ask for a input\n", - " letter = input(\"Please guess a letter : \")\n", - " letter = letter.lower()\n", - "\n", - " # if the letter is already been guessed\n", - " if letter in letters_guessed:\n", - " if warning_left == -1:\n", - " print(\n", - " \"Oops! You've already guessed that letter. You loose a guess as no warning left: \" + guessed_word)\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You now have {} warning{}: \".format(\n", - " warning_left, '' if warning_left == 1 else 's') + guessed_word)\n", - " warning_left -= 1\n", - " i -= 1\n", - " continue\n", - "\n", - " # if not the letter\n", - " if letter not in string.ascii_lowercase:\n", - " if warning_left == -1:\n", - " print(\n", - " \"Oops! That is not a valid letter. You loose a guess as no warning left: \" + guessed_word)\n", - " else:\n", - " print(\"Oops! That is not a valid letter. You now have {} warning{}: \".format(\n", - " warning_left, '' if warning_left == 1 else 's') + guessed_word)\n", - " warning_left -= 1\n", - " i -= 1\n", - " continue\n", - "\n", - " letters_guessed += letter\n", - " guessed_word = get_guessed_word(secret_word, letters_guessed)\n", - "\n", - " # if letter avialable or not available in it\n", - " if letter in secret_word:\n", - " print(\"Good guess: \" + guessed_word)\n", - " i -= 1\n", - " else:\n", - " print(\"Oops! That letter is not in my word: \" + guessed_word)\n", - " if letter in vowels:\n", - " i += 1\n", - "\n", - " # the final result so just end it already\n", - " if is_word_guessed(secret_word, letters_guessed):\n", - " print(\"Congratulations, you won!\")\n", - " res = 0\n", - " for x in letters_guessed:\n", - " if x in secret_word:\n", - " res += 1\n", - " i = 5-i if i < 6 else 0\n", - " res = res * i\n", - " print(\"Your total score for this game is : {}\".format(res))\n", - " else:\n", - " print(\"Sorry, you ran out of guesses. 
The word was {}.\".format(secret_word))\n", - "\n", - " return\n", - "\n", - "\n", - "def match_with_gaps(my_word, other_word):\n", - " '''\n", - " my_word: string with _ characters, current guess of secret word\n", - " other_word: string, regular English word\n", - " returns: boolean, True if all the actual letters of my_word match the \n", - " corresponding letters of other_word, or the letter is the special symbol\n", - " _ , and my_word and other_word are of the same length;\n", - " False otherwise: \n", - " '''\n", - " my_word = my_word.replace(\" \", \"\")\n", - " \n", - " if len(my_word) != len(other_word):\n", - " return False\n", - " \n", - " for i in range(len(my_word)):\n", - " if my_word[i] == '_':\n", - " continue\n", - " if my_word[i] != other_word[i]:\n", - " return False\n", - " \n", - " return True\n", - "\n", - "\n", - "def show_possible_matches(my_word):\n", - " '''\n", - " my_word: string with _ characters, current guess of secret word\n", - " returns: nothing, but should print out every word in wordlist that matches my_word\n", - " Keep in mind that in hangman when a letter is guessed, all the positions\n", - " at which that letter occurs in the secret word are revealed.\n", - " Therefore, the hidden letter(_ ) cannot be one of the letters in the word\n", - " that has already been revealed.\n", - " '''\n", - " reslt = []\n", - " for x in wordlist:\n", - " if match_with_gaps(my_word, x):\n", - " reslt.append(x)\n", - " \n", - " \n", - " print(\"Possible word matches are: \")\n", - " print(* reslt, sep=', ')\n", - " print(\"Guess for: \", my_word)\n", - "\n", - "\n", - "def hangman_with_hints(secret_word):\n", - "\n", - " # Hangman with hints Driver Code\n", - "\n", - " '''\n", - " secret_word: string, the secret word to guess.\n", - "\n", - " Starts up an interactive game of Hangman.\n", - "\n", - " * At the start of the game, let the user know how many \n", - " letters the secret_word contains and how many guesses s/he starts with.\n", - "\n", - " * The user should start with 6 guesses\n", - "\n", - " * Before each round, you should display to the user how many guesses\n", - " s/he has left and the letters that the user has not yet guessed.\n", - "\n", - " * Ask the user to supply one guess per round. Make sure to check that the user guesses a letter\n", - "\n", - " * The user should receive feedback immediately after each guess \n", - " about whether their guess appears in the computer's word.\n", - "\n", - " * After each guess, you should display to the user the \n", - " partially guessed word so far.\n", - "\n", - " * If the guess is the symbol *, print out all words in wordlist that\n", - " matches the current guessed word. 
\n", - "\n", - " '''\n", - " print(\"Welcome to the game Hangman with Hints!\")\n", - " print(\"I am thinking of a word that is {} letters long.\".format(len(secret_word)))\n", - " \n", - " # warning left thoda mehnat kar lete hai\n", - " \n", - " warning_left = 2\n", - " letters_guessed = []\n", - " guessed_word = get_guessed_word(secret_word, letters_guessed)\n", - " i = -1\n", - "\n", - " # guesssing loop\n", - "\n", - " while i < 5 and not is_word_guessed(secret_word, letters_guessed):\n", - " i += 1\n", - " print(\"------------------\")\n", - "\n", - " # print number of guesses left\n", - " print(\"You have {} guess{} left.\".format(6-i, 'es' if i < 5 else ''))\n", - "\n", - " # letters available\n", - " available_letters = get_available_letters(letters_guessed)\n", - " print(\"Available letters: \" + available_letters)\n", - "\n", - " # ask for a input\n", - " letter = input(\"Please guess a letter : \")\n", - " letter = letter.lower()\n", - "\n", - " # reply to hint\n", - " if letter == '*':\n", - " show_possible_matches(guessed_word)\n", - " i -= 1\n", - " continue\n", - " \n", - " # if the letter is already been guessed\n", - " if letter in letters_guessed:\n", - " if warning_left == -1:\n", - " print(\n", - " \"Oops! You've already guessed that letter. You loose a guess as no warning left: \" + guessed_word)\n", - " else:\n", - " print(\"Oops! You've already guessed that letter. You now have {} warning{}: \".format(\n", - " warning_left, '' if warning_left == 1 else 's') + guessed_word)\n", - " warning_left -= 1\n", - " i -= 1\n", - " continue\n", - "\n", - " # if not the letter\n", - " if letter not in string.ascii_lowercase:\n", - " if warning_left == -1:\n", - " print(\n", - " \"Oops! That is not a valid letter. You loose a guess as no warning left: \" + guessed_word)\n", - " else:\n", - " print(\"Oops! That is not a valid letter. You now have {} warning{}: \".format(\n", - " warning_left, '' if warning_left == 1 else 's') + guessed_word)\n", - " warning_left -= 1\n", - " i -= 1\n", - " continue\n", - "\n", - " letters_guessed += letter\n", - " guessed_word = get_guessed_word(secret_word, letters_guessed)\n", - "\n", - " # if letter avialable or not available in it\n", - " if letter in secret_word:\n", - " print(\"Good guess: \" + guessed_word)\n", - " i -= 1\n", - " else:\n", - " print(\"Oops! That letter is not in my word: \" + guessed_word)\n", - " if letter in vowels:\n", - " i += 1\n", - "\n", - " # the final result so just end it already\n", - " if is_word_guessed(secret_word, letters_guessed):\n", - " print(\"Congratulations, you won!\")\n", - " res = 0\n", - " for x in letters_guessed:\n", - " if x in secret_word:\n", - " res += 1\n", - " i = 5-i if i < 6 else 0\n", - " res = res * i\n", - " print(\"Your total score for this game is : {}\".format(res))\n", - " else:\n", - " print(\"Sorry, you ran out of guesses. The word was {}.\".format(secret_word))\n", - "\n", - " return\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - "\n", - " secret_word = choose_word(wordlist)\n", - " \n", - " # print(secret_word) ## turn it on if u wanna cheat XD\n", - "\n", - " # ask if u want hint\n", - " hint = \"s\"\n", - " while hint not in ['y','n']:\n", - " hint = input(\"Would u like to have hints? 
[y/n]: \")\n", - " hint = hint.lower()\n", - " \n", - " # see what function to call\n", - " if hint == 'n':\n", - " hangman(secret_word)\n", - " else:\n", - " hangman_with_hints(secret_word)\n", - "\n", - "# Yaaayyy KHATAM !\n" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading word list from file...\n", - " 55900 words loaded.\n", - "Would u like to have hints? [y/n]: y\n", - "Welcome to the game Hangman with Hints!\n", - "I am thinking of a word that is 8 letters long.\n", - "------------------\n", - "You have 6 guesses left.\n", - "Available letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter : l\n", - "Good guess: _ _ _ _ _ _ l_ \n", - "------------------\n", - "You have 6 guesses left.\n", - "Available letters: abcdefghijkmnopqrstuvwxyz\n", - "Please guess a letter : p\n", - "Oops! That letter is not in my word: _ _ _ _ _ _ l_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: abcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter : *\n", - "Possible word matches are: \n", - "abatable, abjectly, abruptly, absently, absurdly, accompli, accruals, actively, actually, admirals, adorably, adorable, adroitly, aerosols, airedale, airfield, airfoils, airmails, alcohols, amenably, amenable, amicably, amicable, amusedly, annually, anthills, apparels, ardently, arguably, arguable, arrivals, arsenals, artfully, assembly, assemble, astutely, atonally, avowedly, axolotls, bankroll, barbells, bargello, barnacle, baseball, bastille, beanpole, bearably, bearable, bedazzle, bedevils, bedrolls, befuddle, bejewels, belittle, bifocals, billfold, bimetals, binnacle, birdcall, bitingly, bitterly, biweekly, biyearly, blamably, blamable, blastula, blithely, blowhole, bluebell, bobtails, bordello, bouncily, brassily, brazenly, breezily, brightly, broccoli, brokenly, brothels, brussels, brutally, bunghole, caboodle, cakewalk, calomels, camisole, camomile, canaille, candidly, canticle, capitals, capitols, cappella, capriole, caracals, caracole, caramels, caravels, carryall, casually, catapult, catcalls, catchall, cattails, causally, chancels, channels, charnels, chasuble, chattily, chattels, cheekily, cheerily, chenille, chervils, chewable, chlorals, churchly, citadels, clavicle, cleverly, cloudily, clumsily, coarsely, codicils, coequals, cogently, colonels, commonly, conceals, congeals, controls, coquille, cordials, councils, counsels, covertly, cowardly, cowbells, cowgirls, crabbily, craftily, crankily, creakily, creamily, credibly, credible, creosols, crucible, crystals, culpably, culpable, curtails, curvedly, daintily, damnably, damnable, daringly, decently, decibels, decimals, demurely, deposals, despoils, devoutly, diastole, digitals, directly, disciple, dismally, distally, distills, divinely, docilely, doggedly, domicile, doorbell, dotingly, downfall, downhill, dreamily, drearily, dressily, droopily, drowsily, duckbill, dumbbell, dunghill, dutiable, easterly, educable, educible, eggshell, eligibly, eligible, embattle, embezzle, embroils, encircle, enfeeble, enkindle, ensemble, entangle, enthrall, entirely, entrails, enviably, enviable, erasable, escarole, ethanols, expertly, eyeballs, eyestalk, fallibly, fallible, famously, fantails, farewell, fascicle, fatherly, feasibly, feasible, federals, fiercely, filthily, fireball, fistfuls, fitfully, fivefold, flabbily, flagella, flagpole, flannels, flashily, flexibly, flexible, flimsily, flotilla, fluently, follicle, football, footfall, foothill, foothold, forcibly, 
forcible, foretell, foretold, forkfuls, formally, formerly, fourfold, freehold, freewill, friendly, friskily, frontals, frostily, frothily, frugally, frumpily, fulfills, funerals, gargoyle, gastrula, gatefold, generals, genially, genitals, gingerly, gladioli, glassily, globally, gloomily, glossily, godchild, goodwill, graduals, grapnels, greasily, greedily, groggily, groovily, grubbily, grumpily, guiltily, gullibly, gullible, gumboils, handball, handbill, handfuls, hardball, healable, heartily, heatedly, heavenly, hellhole, highball, hoarsely, hobnails, honestly, honolulu, hopefuls, hornball, horribly, horrible, horsefly, housefly, humanely, humerals, hungrily, imbecile, immobile, impanels, imperils, impishly, inedible, infidels, initials, inkwells, insanely, installs, instills, intently, inveigle, inwardly, jaggedly, jauntily, jezebels, jokingly, jonquils, journals, joyfully, joyously, juvenile, karakuls, kestrels, kickball, kingbolt, kinsfolk, kneehole, knightly, knothole, knowable, lacteals, laetrile, landfall, laterals, latterly, laudably, laudable, lavishly, lawfully, leasable, liberals, linearly, loblolly, logicals, loophole, lovingly, maidenly, mandible, mandrels, mandrill, manfully, manifold, mannerly, mantilla, manually, marigold, markedly, marshals, masterly, matronly, meatball, meltable, mentally, menthols, mightily, minerals, minutely, misapply, miscalls, miscible, misdeals, misdealt, misspell, mistrals, modestly, moldable, molecule, molehill, mongrels, monopoly, mooncalf, morosely, mortally, mothball, motherly, movingly, multiple, multiply, musicals, musicale, mutually, narrowly, narwhals, naturals, neutrals, normally, nostrils, nucleoli, numerals, numskull, nuptials, nutshell, oatmeals, obstacle, ocotillo, oddballs, operably, operable, ordinals, ornately, outfield, outsells, overalls, overcall, overkill, overrule, oversell, oversold, owlishly, palpably, palpable, panatela, parabola, parasols, parboils, particle, passably, passable, pastille, patently, payrolls, peephole, pentacle, perusals, phrasals, pigtails, pinballs, pinnacle, pinochle, pitfalls, pitiably, pitiable, placidly, playable, playbill, pluckily, politely, portably, portable, porthole, possibly, possible, potbelly, pratfall, preamble, prettily, pretzels, prevails, priestly, princely, probably, probable, promptly, properly, provably, provable, publicly, puffball, quarrels, quenelle, quintals, quotably, quotable, racially, radicals, rainfall, rakishly, randomly, readably, readable, recently, recitals, redouble, reflexly, refusals, rekindle, reliably, reliable, remodels, remotely, removals, renewals, reposals, resemble, respells, reticule, retrials, reusable, reveille, revivals, ridicule, roomfuls, roundels, ruefully, ruggedly, sailorly, salvable, satchels, satiable, savagely, sawmills, scaffold, scalpels, scandals, scarcely, schedule, scissile, scrabble, scraggly, scramble, scrapple, scribble, scrofula, seashell, secondly, secretly, securely, sedately, seizable, sensibly, sensible, serenely, severely, sextuple, sexually, shakable, shiftily, shoddily, shoptalk, shrewdly, shrivels, sidewalk, signally, silently, sinfully, sinkable, sinkhole, skewbald, skimpily, sleepily, slightly, sloppily, slovenly, smoothly, smudgily, smuttily, snappily, sneakily, snippily, snoopily, snootily, snorkels, snowball, snowfall, sociably, sociable, socially, soddenly, softball, solemnly, solvable, spaniels, sparsely, specials, speedily, spiffily, spiracle, spirilla, spookily, sportily, spousals, sprinkle, spunkily, squabble, squarely, 
squiggle, steadily, steamily, stencils, stodgily, stolidly, stormily, straddle, straggly, straggle, strangle, strictly, strongly, strudels, struggle, stuffily, stupidly, sturdily, subsoils, subtitle, suddenly, suitably, suitable, sullenly, sundials, superbly, supinely, syllable, tangibly, tangible, telltale, tenderly, tendrils, tentacle, terribly, terrible, testable, testicle, thermals, throttle, thurible, tillable, tinfoils, toenails, tomfools, toothily, topsails, topsoils, tortilla, trammels, trashily, travails, trefoils, triangle, trickily, trycycle, tubercle, tumbrels, turmoils, umbrella, unbuckle, uncouple, uneasily, unequals, unevenly, unfairly, ungainly, unicycle, uniquely, unjustly, unkindly, unlikely, unlovely, unravels, unsaddle, unseemly, unsettle, unsnarls, unstably, unstable, unsubtle, untangle, untidily, untimely, unusable, unwarily, unwisely, urgently, usefully, utensils, vacantly, valhalla, valuable, variably, variable, vendible, ventrals, verbally, vermeils, versicle, victrola, victuals, vincible, violably, violable, visually, vitriols, voidable, volatile, wantonly, washable, wassails, wastrels, waybills, wearable, werewolf, westerly, wheezily, wickedly, wilfully, windfall, windmill, withheld, withhold, woefully, woodpile, workable, wormhole, yourself\n", - "Guess for: _ _ _ _ _ _ l_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: abcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter : a\n", - "Good guess: _ _ a_ _ _ l_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter : *\n", - "Possible word matches are: \n", - "abatable, beanpole, bearably, bearable, blamably, blamable, blastula, brassily, brazenly, chancels, channels, charnels, chasuble, chattily, chattels, clavicle, coarsely, crabbily, craftily, crankily, diastole, erasable, feasibly, feasible, flabbily, flagella, flagpole, flannels, flashily, gladioli, glassily, graduals, grapnels, healable, heartily, heatedly, heavenly, hoarsely, leasable, meatball, placidly, playable, playbill, pratfall, quarrels, readably, readable, scaffold, scalpels, scandals, scarcely, seashell, shakable, snappily, spaniels, sparsely, trammels, trashily, travails, wearable\n", - "Guess for: _ _ a_ _ _ l_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdefghijkmnoqrstuvwxyz\n", - "Please guess a letter : e\n", - "Good guess: _ _ a_ _ el_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdfghijkmnoqrstuvwxyz\n", - "Please guess a letter : *\n", - "Possible word matches are: \n", - "chancels, channels, charnels, chattels, coarsely, flannels, grapnels, hoarsely, quarrels, scalpels, scarcely, seashell, spaniels, sparsely, trammels\n", - "Guess for: _ _ a_ _ el_ \n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdfghijkmnoqrstuvwxyz\n", - "Please guess a letter : s\n", - "Good guess: _ _ a_ _ els\n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdfghijkmnoqrtuvwxyz\n", - "Please guess a letter : *\n", - "Possible word matches are: \n", - "chancels, channels, charnels, chattels, flannels, grapnels, quarrels, scalpels, spaniels, trammels\n", - "Guess for: _ _ a_ _ els\n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bcdfghijkmnoqrtuvwxyz\n", - "Please guess a letter : c\n", - "Good guess: c_ a_ _ els\n", - "------------------\n", - "You have 5 
guesses left.\n", - "Available letters: bdfghijkmnoqrtuvwxyz\n", - "Please guess a letter : *\n", - "Possible word matches are: \n", - "chancels, channels, charnels, chattels\n", - "Guess for: c_ a_ _ els\n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bdfghijkmnoqrtuvwxyz\n", - "Please guess a letter : h\n", - "Good guess: cha_ _ els\n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bdfgijkmnoqrtuvwxyz\n", - "Please guess a letter : n\n", - "Good guess: cha_ nels\n", - "------------------\n", - "You have 5 guesses left.\n", - "Available letters: bdfgijkmoqrtuvwxyz\n", - "Please guess a letter : r\n", - "Good guess: charnels\n", - "Congratulations, you won!\n", - "Your total score for this game is : 40\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment_1_and_2/Team_1_Wikipedia_simplifier.ipynb b/Assignment_1_and_2/Team_1_Wikipedia_simplifier.ipynb deleted file mode 100644 index 1f3da54..0000000 --- a/Assignment_1_and_2/Team_1_Wikipedia_simplifier.ipynb +++ /dev/null @@ -1,454 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Team-1_Wikipedia simplifier.ipynb", - "provenance": [], - "collapsed_sections": [], - "authorship_tag": "ABX9TyNli7k1h1gpmUcG/s+H3B2E", - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "IxDlij8t1zqB" - }, - "source": [ - "# Wikipedia simplifier\n", - "\n", - "**ACA 2021 project**\n", - "\n", - "---\n", - "\n", - "**Team 1** \n", - "\n", - "1. Vaibhav goyal\n", - "2. Kajal deep\n", - "3. Alisha verma\n", - "\n", - "---\n", - "\n", - "**Mentors**\n", - "\n", - "1. Rishabh dugaye\n", - "2. 
Ananya gupta\n", - "\n", - "---\n", - "\n", - "\n", - "\n", - "# Assignment 1\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "ecPi4elO32O2" - }, - "source": [ - "def add_word(trie, word):\n", - " \"\"\"\n", - " Add a word to the given trie.\n", - "\n", - " Args:\n", - " trie (dict): the dictionary representation of a trie\n", - " word (str): the word to be added\n", - "\n", - " Returns:\n", - " None\n", - "\n", - " Side effect:\n", - " trie is modified with word included\n", - " \"\"\"\n", - " curr = trie\n", - " for l in word:\n", - " if l not in curr:\n", - " curr[l] = dict()\n", - " curr = curr[l]\n", - "\n", - "\n", - "def create_trie(word_list):\n", - " \"\"\"\n", - " Creates a trie from the given word list.\n", - "\n", - " Hint: use your completed implementation of add_word()\n", - "\n", - " Args:\n", - " word_list (list): list of words (str)\n", - "\n", - " Returns:\n", - " dict: a dictionary representation of the trie\n", - " \"\"\"\n", - " trie = dict()\n", - " for word in word_list:\n", - " curr = trie\n", - " for l in word:\n", - " if l not in curr:\n", - " curr[l] = dict()\n", - " curr = curr[l]\n", - "\n", - " return trie\n", - "\n", - "def in_trie(trie, word):\n", - " \"\"\"\n", - " Check whether the given word is present within the trie.\n", - "\n", - " Args:\n", - " word (str): the word to check\n", - " trie (dict): the trie to check against\n", - "\n", - " Returns:\n", - " bool: True if the word is in the trie, False if it is not\n", - " \"\"\"\n", - " curr = trie\n", - " for l in word:\n", - " if l in curr:\n", - " curr = curr[l]\n", - " else:\n", - " return False\n", - " if(bool(curr)):\n", - " return False\n", - " return True \n", - "\n", - "def list_matches(trie, prefix):\n", - " \"\"\"\n", - " List all word with the given prefix in the trie.\n", - " If no words in the trie match the given prefix, return an empty list.\n", - "\n", - " Hint: you may want to write a recursive helper function to traverse the\n", - " trie.\n", - "\n", - " Args:\n", - " prefix (str): the prefix to match against\n", - " trie (dict): the trie to search over\n", - "\n", - " Returns:\n", - " list: all words in the trie that begin with prefix\n", - " \"\"\"\n", - " curr = trie\n", - " for l in prefix:\n", - " if l in curr:\n", - " curr = curr[l]\n", - " else: \n", - " return []\n", - " \n", - " words = []\n", - "\n", - "def main():\n", - " \"\"\"main function\"\"\"\n", - " word_list = ['bear']\n", - " my_trie = create_trie(word_list)\n", - " add_word(my_trie, 'beat')\n", - " print(my_trie)\n", - " print(in_trie(my_trie, 'b'))\n", - "\n", - "\n", - "if __name__ == '__main__':\n", - " \"\"\"\n", - " Feel free to test your implementation here by running python3 hw1.py in\n", - " your terminal\n", - " \"\"\"\n", - " main()" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "4GzX8GVc3ypy" - }, - "source": [ - "# Assignment 2" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "RAXhglWKlvJW", - "outputId": "36f4ab81-8177-4f16-b011-3ba41300e03c" - }, - "source": [ - "# Problem Set 2, hangman.py\n", - "import random\n", - "import string\n", - "\n", - "WORDLIST_FILENAME = \"words.txt\"\n", - "\n", - "def load_words():\n", - " \n", - " print(\"Loading word list from file...\")\n", - " # inFile: file\n", - " inFile = open(WORDLIST_FILENAME, 'r')\n", - " # line: string\n", - " line = inFile.readline()\n", - " # wordlist: list of strings\n", - " wordlist = 
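In the Team 1 trie cell above, `list_matches()` walks down to the prefix node but stops short of collecting the words underneath it (it creates `words = []` and never returns it). A hedged sketch of one way to finish that traversal, keeping the cell's convention that a word ends exactly where the nested dict becomes empty; this is a hypothetical completion for illustration, not the authors' submitted solution.

```python
def list_matches(trie, prefix):
    # Walk down to the node that corresponds to the prefix.
    node = trie
    for ch in prefix:
        if ch not in node:
            return []                      # no word in the trie starts with prefix
        node = node[ch]

    # Collect every word below that node; an empty dict marks the end of a word.
    words = []
    def collect(subtrie, suffix):
        if not subtrie:
            words.append(prefix + suffix)
        for ch, child in subtrie.items():
            collect(child, suffix + ch)
    collect(node, "")
    return words

# With the trie built in the cell above ('bear' then 'beat'):
trie = {"b": {"e": {"a": {"r": {}, "t": {}}}}}
print(list_matches(trie, "be"))   # ['bear', 'beat']
print(list_matches(trie, "bo"))   # []
```

Note the same limitation as `in_trie()` in that cell: without an explicit end-of-word marker, a word that is also a prefix of another word (for example "bear" and "bears") cannot be represented.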
line.split()\n", - " print(\" \", len(wordlist), \"words loaded.\")\n", - " return wordlist\n", - "\n", - "\n", - "def choose_word(wordlist):\n", - " \n", - " return random.choice(wordlist)\n", - "\n", - "\n", - "wordlist = load_words()\n", - "\n", - "\n", - "def is_word_guessed(secret_word, letters_guessed):\n", - "\n", - " for l in secret_word:\n", - " if l not in letters_guessed:\n", - " return False\n", - " return True\n", - "\n", - "\n", - "def get_guessed_word(secret_word, letters_guessed):\n", - " \n", - " word = secret_word[:]\n", - " for l in secret_word:\n", - " if l not in letters_guessed:\n", - " word = word.replace(l, '_ ', 1)\n", - " return word\n", - "\n", - "\n", - "def get_available_letters(letters_guessed):\n", - " \n", - " available_letters = string.ascii_lowercase\n", - " for l in letters_guessed:\n", - " available_letters = available_letters.replace(l, '')\n", - " return available_letters\n", - "\n", - "\n", - "def hangman(secret_word):\n", - " \n", - " print('Welcome to the game Hangman!')\n", - " print('I am thinking of a word that is', len(secret_word), 'letters long.')\n", - " print('You have 3 warnings left.')\n", - "\n", - " guess = 6\n", - " warning = 3\n", - " letters_guessed = []\n", - " \n", - " while guess:\n", - " print('-----------------------------------')\n", - " if is_word_guessed(secret_word, letters_guessed):\n", - " print('Congratulations, you won!')\n", - " print('Your total score for this game is', guess*len(set(secret_word)))\n", - " return\n", - " print('You have', guess, 'guesses left.')\n", - " print('Availbale letters:', get_available_letters(letters_guessed))\n", - " letter = input('Please guess a letter: ').lower()\n", - " if letter.isalpha():\n", - " if letter in letters_guessed:\n", - " if warning:\n", - " warning -= 1\n", - " print('Oops! You\\'ve already guessed that letter. You now have', warning ,'warnings :', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " guess -= 1\n", - " print('Oops! You\\'ve already guessed that letter. You have no warnings left so you lose a guess:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " letters_guessed.append(letter)\n", - " if letter in secret_word:\n", - " print('Good guess:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " if letter in ['a', 'e', 'i', 'o', 'u']:\n", - " guess -= 2\n", - " else:\n", - " guess -= 1 \n", - " print('Oops! That letter is not in my word:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " if warning:\n", - " warning -= 1\n", - " print('Oops! That is not a valid letter. You now have', warning ,'warnings left:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " guess -= 1\n", - " print('Oops! That is not a valid letter. You have no warnings left so you lose one guess:', get_guessed_word(secret_word, letters_guessed))\n", - "\n", - " print('-----------------------------------')\n", - " print('Sorry, you ran out of guesses. 
The word was', secret_word,'.')\n", - " return\n", - "\n", - "\n", - "def match_with_gaps(my_word, other_word):\n", - " \n", - " word = my_word[:]\n", - " word = \"\".join(word.split())\n", - " if len(word) == len(other_word):\n", - " pos = 0\n", - " for l in word:\n", - " if l != '_' and other_word.find(l, pos) != word.find(l, pos):\n", - " return False\n", - " pos += 1\n", - " return True\n", - " else:\n", - " return False\n", - "\n", - "\n", - "def show_possible_matches(my_word):\n", - " \n", - " found_words = []\n", - " for word in wordlist:\n", - " if match_with_gaps(my_word, word):\n", - " found_words.append(word)\n", - " \n", - " if len(found_words):\n", - " for word in found_words:\n", - " print(word, end='\\t')\n", - " else:\n", - " print('No matches found')\n", - " \n", - " print('\\n')\n", - "\n", - "\n", - "def hangman_with_hints(secret_word):\n", - " \n", - " print('Welcome to the game Hangman!')\n", - " print('I am thinking of a word that is', len(secret_word), 'letters long.')\n", - " print('You have 3 warnings left.')\n", - "\n", - " guess = 6\n", - " warning = 3\n", - " letters_guessed = []\n", - " \n", - " while guess:\n", - " print('-----------------------------------')\n", - " if is_word_guessed(secret_word, letters_guessed):\n", - " print('Congratulations, you won!')\n", - " print('Your total score for this game is', guess*len(set(secret_word)))\n", - " return\n", - " print('You have', guess, 'guesses left.')\n", - " print('Availbale letters:', get_available_letters(letters_guessed))\n", - " letter = input('Please guess a letter: ').lower()\n", - " if letter.isalpha():\n", - " if letter in letters_guessed:\n", - " if warning:\n", - " warning -= 1\n", - " print('Oops! You\\'ve already guessed that letter. You now have', warning ,'warnings :', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " guess -= 1\n", - " print('Oops! You\\'ve already guessed that letter. You have no warnings left so you lose a guess:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " letters_guessed.append(letter)\n", - " if letter in secret_word:\n", - " print('Good guess:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " if letter in ['a', 'e', 'i', 'o', 'u']:\n", - " guess -= 2\n", - " else:\n", - " guess -= 1 \n", - " print('Oops! That letter is not in my word:', get_guessed_word(secret_word, letters_guessed))\n", - " elif letter == '*':\n", - " print('Possible word matches are:')\n", - " show_possible_matches(get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " if warning:\n", - " warning -= 1\n", - " print('Oops! That is not a valid letter. You now have', warning ,'warnings left:', get_guessed_word(secret_word, letters_guessed))\n", - " else:\n", - " guess -= 1\n", - " print('Oops! That is not a valid letter. You have no warnings left so you lose one guess:', get_guessed_word(secret_word, letters_guessed))\n", - "\n", - " print('-----------------------------------')\n", - " print('Sorry, you ran out of guesses. 
The word was', secret_word,'.')\n", - " return\n", - "\n", - "\n", - "if __name__ == \"__main__\":\n", - " \n", - " secret_word = choose_word(wordlist)\n", - " # hangman(secret_word)\n", - " \n", - " hangman_with_hints(secret_word)\n" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Loading word list from file...\n", - " 55900 words loaded.\n", - "Welcome to the game Hangman!\n", - "I am thinking of a word that is 7 letters long.\n", - "You have 3 warnings left.\n", - "-----------------------------------\n", - "You have 6 guesses left.\n", - "Availbale letters: abcdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: b\n", - "Oops! That letter is not in my word: _ _ _ _ _ _ _ \n", - "-----------------------------------\n", - "You have 5 guesses left.\n", - "Availbale letters: acdefghijklmnopqrstuvwxyz\n", - "Please guess a letter: e\n", - "Good guess: _ _ _ _ _ e_ \n", - "-----------------------------------\n", - "You have 5 guesses left.\n", - "Availbale letters: acdfghijklmnopqrstuvwxyz\n", - "Please guess a letter: f\n", - "Oops! That letter is not in my word: _ _ _ _ _ e_ \n", - "-----------------------------------\n", - "You have 4 guesses left.\n", - "Availbale letters: acdghijklmnopqrstuvwxyz\n", - "Please guess a letter: d\n", - "Good guess: _ d_ _ _ ed\n", - "-----------------------------------\n", - "You have 4 guesses left.\n", - "Availbale letters: acghijklmnopqrstuvwxyz\n", - "Please guess a letter: p\n", - "Good guess: _ d_ p_ ed\n", - "-----------------------------------\n", - "You have 4 guesses left.\n", - "Availbale letters: acghijklmnoqrstuvwxyz\n", - "Please guess a letter: n\n", - "Oops! That letter is not in my word: _ d_ p_ ed\n", - "-----------------------------------\n", - "You have 3 guesses left.\n", - "Availbale letters: acghijklmoqrstuvwxyz\n", - "Please guess a letter: *\n", - "Possible word matches are:\n", - "adapted\tadopted\t\n", - "\n", - "-----------------------------------\n", - "You have 3 guesses left.\n", - "Availbale letters: acghijklmoqrstuvwxyz\n", - "Please guess a letter: a\n", - "Good guess: adap_ ed\n", - "-----------------------------------\n", - "You have 3 guesses left.\n", - "Availbale letters: cghijklmoqrstuvwxyz\n", - "Please guess a letter: t\n", - "Good guess: adapted\n", - "-----------------------------------\n", - "Congratulations, you won!\n", - "Your total score for this game is 15\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Assignment-3/README.md b/Assignment_3/README.md similarity index 100% rename from Assignment-3/README.md rename to Assignment_3/README.md diff --git a/Assignment-3/T4-Assignment_3.ipynb b/Assignment_3/T4-Assignment_3.ipynb similarity index 100% rename from Assignment-3/T4-Assignment_3.ipynb rename to Assignment_3/T4-Assignment_3.ipynb diff --git a/Assignment_4/Assignment-4_Team-5.ipynb b/Assignment_4/Assignment-4_Team-5.ipynb deleted file mode 100644 index 279ea71..0000000 --- a/Assignment_4/Assignment-4_Team-5.ipynb +++ /dev/null @@ -1,277 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Untitled6.ipynb", - "provenance": [], - "collapsed_sections": [], - "include_colab_link": true - }, - "kernelspec": { - "display_name": "Python 3", - "name": "python3" - }, - "language_info": { - "name": "python" - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "code", - 
"metadata": { - "id": "Uez4c4iOo3GS" - }, - "source": [ - "import tensorflow as tf\n", - "import numpy as np\n", - "import tensorflow_datasets as tfds" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "nZTBxKU4pIus" - }, - "source": [ - "dataset=tfds.load('imdb_reviews',as_supervised=True)\n", - "train_dataset=dataset['train']\n", - "test_dataset=dataset['test']\n", - "#train_dataset=train_dataset.take(500)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "c2qy8eHspZSv" - }, - "source": [ - "BUFFER_SIZE=10000\n", - "BATCH_SIZE=50" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Pfn1qaYppnP4" - }, - "source": [ - "train_dataset=train_dataset.shuffle(BATCH_SIZE).batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)\n", - "test_dataset=test_dataset.batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "Owbj9oAqqBMp" - }, - "source": [ - "VOCAB_SIZE=1000\n", - "encoder=tf.keras.layers.experimental.preprocessing.TextVectorization(VOCAB_SIZE)\n", - "encoder.adapt(train_dataset.map(lambda text,label : text))" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "w62UYTF_qt0C", - "outputId": "4c7be2e3-ed49-4b12-cf12-7ba57a3b3c9a" - }, - "source": [ - "vocab=np.array(encoder.get_vocabulary())\n", - "print(vocab[199])" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "girl\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "zNqZ2ZcjrZJC" - }, - "source": [ - "\n", - "model = tf.keras.Sequential([\n", - " encoder,\n", - " tf.keras.layers.Embedding(\n", - " input_dim=len(encoder.get_vocabulary()),\n", - " output_dim=64,\n", - " # Use masking to handle the variable sequence lengths\n", - " mask_zero=True),\n", - " tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64,return_sequences=True,recurrent_dropout=0.1)),\n", - " tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(32,recurrent_dropout=0.1)),\n", - " tf.keras.layers.Dense(64, activation='relu'),\n", - " tf.keras.layers.Dense(64,activation='relu'), tf.keras.layers.Dense(64,activation='relu'),\n", - " tf.keras.layers.Dense(1)\n", - "])" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "id": "bYeR3SY9NTZ-" - }, - "source": [ - "model.compile(loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),\n", - " optimizer=tf.keras.optimizers.Adam(),\n", - " metrics=['accuracy'])" - ], - "execution_count": null, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 471 - }, - "id": "HK8XqKolNa0t", - "outputId": "8da68a4b-60a5-4ace-9aab-3ab00c7dec7c" - }, - "source": [ - "history = model.fit(train_dataset, epochs=10,\n", - " validation_data=test_dataset,\n", - " validation_steps=30)" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Epoch 1/10\n", - "10/10 [==============================] - 68s 6s/step - loss: 2.6994 - accuracy: 0.5196 - val_loss: 0.7069 - val_accuracy: 0.5000\n", - "Epoch 2/10\n", - "10/10 [==============================] - 57s 6s/step - loss: 0.7299 - accuracy: 0.4969 - val_loss: 0.8615 - val_accuracy: 0.4993\n", - "Epoch 3/10\n", - 
"10/10 [==============================] - 56s 6s/step - loss: 1.0709 - accuracy: 0.5026 - val_loss: 0.6973 - val_accuracy: 0.4980\n", - "Epoch 4/10\n", - " 1/10 [==>...........................] - ETA: 25s - loss: 0.7865 - accuracy: 0.5600" - ], - "name": "stdout" - }, - { - "output_type": "error", - "ename": "KeyboardInterrupt", - "evalue": "ignored", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 1\u001b[0m history = model.fit(train_dataset, epochs=10,\n\u001b[1;32m 2\u001b[0m \u001b[0mvalidation_data\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mtest_dataset\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 3\u001b[0;31m validation_steps=30)\n\u001b[0m", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mfit\u001b[0;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_batch_size, validation_freq, max_queue_size, workers, use_multiprocessing)\u001b[0m\n\u001b[1;32m 1098\u001b[0m _r=1):\n\u001b[1;32m 1099\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_train_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1100\u001b[0;31m \u001b[0mtmp_logs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1101\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1102\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 826\u001b[0m \u001b[0mtracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 827\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtm\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 828\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 829\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0;34m\"xla\"\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_experimental_compile\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m\"nonXla\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 830\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 853\u001b[0m \u001b[0;31m# In this case we have created variables on the first call, so we run the\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 854\u001b[0m \u001b[0;31m# defunned version which is guaranteed to never create variables.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 855\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_stateless_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;31m# pylint: disable=not-callable\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 856\u001b[0m \u001b[0;32melif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_stateful_fn\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 857\u001b[0m \u001b[0;31m# Release the lock early so that multiple threads can perform the call\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2941\u001b[0m filtered_flat_args) = self._maybe_define_function(args, kwargs)\n\u001b[1;32m 2942\u001b[0m return graph_function._call_flat(\n\u001b[0;32m-> 2943\u001b[0;31m filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access\n\u001b[0m\u001b[1;32m 2944\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2945\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[0;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[1;32m 1917\u001b[0m \u001b[0;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1918\u001b[0m return self._build_call_outputs(self._inference_function.call(\n\u001b[0;32m-> 1919\u001b[0;31m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[0m\u001b[1;32m 1920\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n\u001b[1;32m 1921\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, ctx, args, 
cancellation_manager)\u001b[0m\n\u001b[1;32m 558\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 559\u001b[0m \u001b[0mattrs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mattrs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 560\u001b[0;31m ctx=ctx)\n\u001b[0m\u001b[1;32m 561\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 562\u001b[0m outputs = execute.execute_with_cancellation(\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[0;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0;32m---> 60\u001b[0;31m inputs, attrs, num_outputs)\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mname\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " - ] - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "Uheg2abgn4N3", - "outputId": "c7728b4c-e39e-48bb-ed8e-1065795d57f1" - }, - "source": [ - "sample_text = ('Not good Not good Not good Not good Not good Not good Not good Not good ')\n", - "predictions = model.predict(np.array([sample_text]))\n", - "print(predictions)" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[[-0.14589255]]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 370 - }, - "id": "Qro_alGl5lTi", - "outputId": "50fd5293-6f44-41d7-d4d7-b28e30cfdeb5" - }, - "source": [ - "test_loss, test_acc = model.evaluate(test_dataset)\n", - "\n", - "print('Test Loss:', test_loss)\n", - "print('Test Accuracy:', test_acc)" - ], - "execution_count": null, - "outputs": [ - { - "output_type": "stream", - "text": [ - " 17/500 [>.............................] 
- ETA: 5:02 - loss: 0.7476 - accuracy: 0.5165" - ], - "name": "stdout" - }, - { - "output_type": "error", - "ename": "KeyboardInterrupt", - "evalue": "ignored", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mtest_loss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_acc\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mevaluate\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mtest_dataset\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Test Loss:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_loss\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 4\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'Test Accuracy:'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtest_acc\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py\u001b[0m in \u001b[0;36mevaluate\u001b[0;34m(self, x, y, batch_size, verbose, sample_weight, steps, callbacks, max_queue_size, workers, use_multiprocessing, return_dict)\u001b[0m\n\u001b[1;32m 1387\u001b[0m \u001b[0;32mwith\u001b[0m \u001b[0mtrace\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m'test'\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mstep_num\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0m_r\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1388\u001b[0m \u001b[0mcallbacks\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mon_test_batch_begin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstep\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1389\u001b[0;31m \u001b[0mtmp_logs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtest_function\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0miterator\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1390\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mdata_handler\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshould_sync\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1391\u001b[0m \u001b[0mcontext\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0masync_wait\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 826\u001b[0m \u001b[0mtracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 827\u001b[0m \u001b[0;32mwith\u001b[0m 
\u001b[0mtrace\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTrace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_name\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mtm\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 828\u001b[0;31m \u001b[0mresult\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 829\u001b[0m \u001b[0mcompiler\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m\"xla\"\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_experimental_compile\u001b[0m \u001b[0;32melse\u001b[0m \u001b[0;34m\"nonXla\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 830\u001b[0m \u001b[0mnew_tracing_count\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexperimental_get_tracing_count\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/def_function.py\u001b[0m in \u001b[0;36m_call\u001b[0;34m(self, *args, **kwds)\u001b[0m\n\u001b[1;32m 860\u001b[0m \u001b[0;31m# In this case we have not created variables on the first call. So we can\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 861\u001b[0m \u001b[0;31m# run the first trace but we should fail if variables are created.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 862\u001b[0;31m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_stateful_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mkwds\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 863\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_created_variables\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 864\u001b[0m raise ValueError(\"Creating variables on a non-first call to a function\"\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m__call__\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m 2941\u001b[0m filtered_flat_args) = self._maybe_define_function(args, kwargs)\n\u001b[1;32m 2942\u001b[0m return graph_function._call_flat(\n\u001b[0;32m-> 2943\u001b[0;31m filtered_flat_args, captured_inputs=graph_function.captured_inputs) # pylint: disable=protected-access\n\u001b[0m\u001b[1;32m 2944\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 2945\u001b[0m \u001b[0;34m@\u001b[0m\u001b[0mproperty\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36m_call_flat\u001b[0;34m(self, args, captured_inputs, cancellation_manager)\u001b[0m\n\u001b[1;32m 1917\u001b[0m \u001b[0;31m# No tape is watching; skip to running the function.\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1918\u001b[0m 
return self._build_call_outputs(self._inference_function.call(\n\u001b[0;32m-> 1919\u001b[0;31m ctx, args, cancellation_manager=cancellation_manager))\n\u001b[0m\u001b[1;32m 1920\u001b[0m forward_backward = self._select_forward_and_backward_functions(\n\u001b[1;32m 1921\u001b[0m \u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/function.py\u001b[0m in \u001b[0;36mcall\u001b[0;34m(self, ctx, args, cancellation_manager)\u001b[0m\n\u001b[1;32m 558\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 559\u001b[0m \u001b[0mattrs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mattrs\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 560\u001b[0;31m ctx=ctx)\n\u001b[0m\u001b[1;32m 561\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 562\u001b[0m outputs = execute.execute_with_cancellation(\n", - "\u001b[0;32m/usr/local/lib/python3.7/dist-packages/tensorflow/python/eager/execute.py\u001b[0m in \u001b[0;36mquick_execute\u001b[0;34m(op_name, num_outputs, inputs, attrs, ctx, name)\u001b[0m\n\u001b[1;32m 58\u001b[0m \u001b[0mctx\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mensure_initialized\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 59\u001b[0m tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n\u001b[0;32m---> 60\u001b[0;31m inputs, attrs, num_outputs)\n\u001b[0m\u001b[1;32m 61\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0mcore\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_NotOkStatusException\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 62\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mname\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mnot\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " - ] - } - ] - } - ] -} diff --git a/Assignment_4/Team_1_Assignment_4.ipynb b/Assignment_4/Team_1_Assignment_4.ipynb deleted file mode 100644 index 6654c3a..0000000 --- a/Assignment_4/Team_1_Assignment_4.ipynb +++ /dev/null @@ -1,2736 +0,0 @@ -{ - "nbformat": 4, - "nbformat_minor": 0, - "metadata": { - "colab": { - "name": "Team-1_Assignment-4.ipynb", - "provenance": [], - "authorship_tag": "ABX9TyNvcxph+b9rGUW18/FNda0B", - "include_colab_link": true - }, - "kernelspec": { - "name": "python3", - "display_name": "Python 3" - }, - "language_info": { - "name": "python" - }, - "accelerator": "GPU", - "widgets": { - "application/vnd.jupyter.widget-state+json": { - "4ad78aefbff84edebff75c2bf2c74d40": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_2da75a9b724f4f28a46f8170bdc32531", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_884bf4a023e8484bbbeaa70c20b4ec42", - "IPY_MODEL_ae83be5c7169469b909990be9addcc57" - ] - } - }, - "2da75a9b724f4f28a46f8170bdc32531": { - 
"model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "884bf4a023e8484bbbeaa70c20b4ec42": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_5fb1868bcd8b443b988f998b196f1d66", - "_dom_classes": [], - "description": "Dl Completed...: 100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_7c2e054365bb46cf92c9ddf08788f0ad" - } - }, - "ae83be5c7169469b909990be9addcc57": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_22959b1ad40c4aa2b37bb88d2aa76d13", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 1/1 [00:01<00:00, 1.59s/ url]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_706074f3515b45bdb8a5f7cf0f70d3a8" - } - }, - "5fb1868bcd8b443b988f998b196f1d66": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "7c2e054365bb46cf92c9ddf08788f0ad": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - 
"overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "22959b1ad40c4aa2b37bb88d2aa76d13": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "706074f3515b45bdb8a5f7cf0f70d3a8": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "ada8db9b412641d3bd1687721f594b87": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_af55377115fe4861b982cadddfffbe81", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_27fe16e87a674938b1201cfda3923287", - "IPY_MODEL_29f862ac257d4f28a959f6345dbfa3cd" - ] - } - }, - "af55377115fe4861b982cadddfffbe81": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - 
"flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "27fe16e87a674938b1201cfda3923287": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_d9e3d2ad1c4c425898332157a800dbb3", - "_dom_classes": [], - "description": "Dl Size...: 100%", - "_model_name": "FloatProgressModel", - "bar_style": "success", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_1de46342cae247908351d7dd4add92bd" - } - }, - "29f862ac257d4f28a959f6345dbfa3cd": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_1d52bab99f23409f98630325d9efa14d", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 80/80 [00:01<00:00, 51.18 MiB/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_905b14471b3643b58ba3830fa174f86c" - } - }, - "d9e3d2ad1c4c425898332157a800dbb3": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "1de46342cae247908351d7dd4add92bd": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - 
"1d52bab99f23409f98630325d9efa14d": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "905b14471b3643b58ba3830fa174f86c": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "09300165c1894bd3a7b78c87fb250511": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_0149d8bb372343d283bb1829f96e28f4", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_4a26c23cc0824442ac0dcdd609992beb", - "IPY_MODEL_c007626f2c5e421abc2a4887a461e037" - ] - } - }, - "0149d8bb372343d283bb1829f96e28f4": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "4a26c23cc0824442ac0dcdd609992beb": { - "model_module": "@jupyter-widgets/controls", - "model_name": 
"FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_0c816db6f8b34683b5aef0ac56f66072", - "_dom_classes": [], - "description": "", - "_model_name": "FloatProgressModel", - "bar_style": "info", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_e0cacdf1eb5a44b78a4f66335be30317" - } - }, - "c007626f2c5e421abc2a4887a461e037": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_1d126277a2c44684a376962bbdfb56f6", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 25000/0 [00:12<00:00, 3739.95 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_a02d50a7ae384886a967182e6981c44c" - } - }, - "0c816db6f8b34683b5aef0ac56f66072": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "e0cacdf1eb5a44b78a4f66335be30317": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "1d126277a2c44684a376962bbdfb56f6": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "a02d50a7ae384886a967182e6981c44c": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - 
"justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "1d361f1e76bc4c998b224c8416fc8a16": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_5b1315b6fd254acb94b58f87982f24aa", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_6cdcb40c2f5c46b28729a3520acb20a1", - "IPY_MODEL_ee43cf31a9f345eca70cc73227c5a169" - ] - } - }, - "5b1315b6fd254acb94b58f87982f24aa": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "6cdcb40c2f5c46b28729a3520acb20a1": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_6badcfd59faa48a793e71e27098d8f3d", - "_dom_classes": [], - "description": " 56%", - "_model_name": "FloatProgressModel", - "bar_style": "danger", - "max": 25000, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 13935, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_c2ceb26415204c9a852c94a0fcb53383" - } - }, - "ee43cf31a9f345eca70cc73227c5a169": { - 
"model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_089001ea663c4789b2fe323feb01d247", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 13935/25000 [00:00<00:00, 139348.14 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_b0a27c6b82694ff992e8e97bfa984478" - } - }, - "6badcfd59faa48a793e71e27098d8f3d": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "c2ceb26415204c9a852c94a0fcb53383": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "089001ea663c4789b2fe323feb01d247": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "b0a27c6b82694ff992e8e97bfa984478": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - 
"align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "a9c4cc19d9dc4d40a3b17631d7448daa": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_f918a91dea72484e97e82e36d01932af", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_c6bd265dcadc4e9bb12b8c58a5db57ec", - "IPY_MODEL_305d41b438fb449b939b51cf6690c046" - ] - } - }, - "f918a91dea72484e97e82e36d01932af": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "c6bd265dcadc4e9bb12b8c58a5db57ec": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_f8d5981f5ea94bcc87dc57538e5bce29", - "_dom_classes": [], - "description": "", - "_model_name": "FloatProgressModel", - "bar_style": "info", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_df89b9803f144e3890eddf8e16c7db0e" - } - }, - "305d41b438fb449b939b51cf6690c046": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_68387809e9794b8892a6db66d5371555", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 25000/0 [00:12<00:00, 3879.71 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_72e10b7692aa4b39b6c620496ff7cefb" - } - }, - "f8d5981f5ea94bcc87dc57538e5bce29": { 
- "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "df89b9803f144e3890eddf8e16c7db0e": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "68387809e9794b8892a6db66d5371555": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "72e10b7692aa4b39b6c620496ff7cefb": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "5b4f922e27d5488bafed266090322917": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": 
"1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_84d2f5ac34444b40a4d7a2bb2a952078", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_5c8868e32d1a42269361c8e628870217", - "IPY_MODEL_9c8858a60d48487aab7c5b6a6ae61187" - ] - } - }, - "84d2f5ac34444b40a4d7a2bb2a952078": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "5c8868e32d1a42269361c8e628870217": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_11d33dcc576b4c2199d3f9b0cac7f73d", - "_dom_classes": [], - "description": " 49%", - "_model_name": "FloatProgressModel", - "bar_style": "danger", - "max": 25000, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 12220, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_6afb891fccf24421bce1978a7fc244c1" - } - }, - "9c8858a60d48487aab7c5b6a6ae61187": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_ff0c7ee1bdc2406797f806967f179b44", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 12220/25000 [00:00<00:00, 122197.79 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_eee3640504d847a4b820e8008c02aa41" - } - }, - "11d33dcc576b4c2199d3f9b0cac7f73d": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "6afb891fccf24421bce1978a7fc244c1": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - 
"justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "ff0c7ee1bdc2406797f806967f179b44": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "eee3640504d847a4b820e8008c02aa41": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "c40037b21be849b1802af03736a9e284": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_5c33ea7a8a0b4f749ce3d454c51ac1ed", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_ac68b1dacf324d7593e74366a0e7a67c", - "IPY_MODEL_c3970b6ca46e4a39b0916698e1f2047e" - ] - } - }, - "5c33ea7a8a0b4f749ce3d454c51ac1ed": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - 
"_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "ac68b1dacf324d7593e74366a0e7a67c": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_4fecb9db954b4e1682ac498bc1d86156", - "_dom_classes": [], - "description": "", - "_model_name": "FloatProgressModel", - "bar_style": "info", - "max": 1, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 1, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_db67c3ee1b4540d4973df43d2aa3f5ab" - } - }, - "c3970b6ca46e4a39b0916698e1f2047e": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_dcf12272a1ef4748b7e0da3913c64cb3", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 50000/0 [00:17<00:00, 3904.65 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f9096cc21ab840a08c2a2a381e2ff40b" - } - }, - "4fecb9db954b4e1682ac498bc1d86156": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "db67c3ee1b4540d4973df43d2aa3f5ab": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - 
"align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "dcf12272a1ef4748b7e0da3913c64cb3": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "f9096cc21ab840a08c2a2a381e2ff40b": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "dda52d9f72e4459daf5aa8ae0a591b2a": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HBoxModel", - "state": { - "_view_name": "HBoxView", - "_dom_classes": [], - "_model_name": "HBoxModel", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.5.0", - "box_style": "", - "layout": "IPY_MODEL_613af5ad3b1743a0a6602ac6f24b72c1", - "_model_module": "@jupyter-widgets/controls", - "children": [ - "IPY_MODEL_1aba18bef74746fb8d1d0b10977a4407", - "IPY_MODEL_24a2bb51bebf4be191eb3705fd59a669" - ] - } - }, - "613af5ad3b1743a0a6602ac6f24b72c1": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, 
- "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "1aba18bef74746fb8d1d0b10977a4407": { - "model_module": "@jupyter-widgets/controls", - "model_name": "FloatProgressModel", - "state": { - "_view_name": "ProgressView", - "style": "IPY_MODEL_fe86ad0ecefd414da364b9b1155c0dd3", - "_dom_classes": [], - "description": " 64%", - "_model_name": "FloatProgressModel", - "bar_style": "danger", - "max": 50000, - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": 32213, - "_view_count": null, - "_view_module_version": "1.5.0", - "orientation": "horizontal", - "min": 0, - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_3f1a46aaa8cd471189b401814aedf104" - } - }, - "24a2bb51bebf4be191eb3705fd59a669": { - "model_module": "@jupyter-widgets/controls", - "model_name": "HTMLModel", - "state": { - "_view_name": "HTMLView", - "style": "IPY_MODEL_7227ac721a2942cfbbc0282f0ac100da", - "_dom_classes": [], - "description": "", - "_model_name": "HTMLModel", - "placeholder": "​", - "_view_module": "@jupyter-widgets/controls", - "_model_module_version": "1.5.0", - "value": " 32213/50000 [00:00<00:00, 79302.13 examples/s]", - "_view_count": null, - "_view_module_version": "1.5.0", - "description_tooltip": null, - "_model_module": "@jupyter-widgets/controls", - "layout": "IPY_MODEL_f744483bfb4841a392b6255d8f48c763" - } - }, - "fe86ad0ecefd414da364b9b1155c0dd3": { - "model_module": "@jupyter-widgets/controls", - "model_name": "ProgressStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "ProgressStyleModel", - "description_width": "initial", - "_view_module": "@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "bar_color": null, - "_model_module": "@jupyter-widgets/controls" - } - }, - "3f1a46aaa8cd471189b401814aedf104": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - }, - "7227ac721a2942cfbbc0282f0ac100da": { - "model_module": "@jupyter-widgets/controls", - "model_name": "DescriptionStyleModel", - "state": { - "_view_name": "StyleView", - "_model_name": "DescriptionStyleModel", - "description_width": "", - "_view_module": 
"@jupyter-widgets/base", - "_model_module_version": "1.5.0", - "_view_count": null, - "_view_module_version": "1.2.0", - "_model_module": "@jupyter-widgets/controls" - } - }, - "f744483bfb4841a392b6255d8f48c763": { - "model_module": "@jupyter-widgets/base", - "model_name": "LayoutModel", - "state": { - "_view_name": "LayoutView", - "grid_template_rows": null, - "right": null, - "justify_content": null, - "_view_module": "@jupyter-widgets/base", - "overflow": null, - "_model_module_version": "1.2.0", - "_view_count": null, - "flex_flow": null, - "width": null, - "min_width": null, - "border": null, - "align_items": null, - "bottom": null, - "_model_module": "@jupyter-widgets/base", - "top": null, - "grid_column": null, - "overflow_y": null, - "overflow_x": null, - "grid_auto_flow": null, - "grid_area": null, - "grid_template_columns": null, - "flex": null, - "_model_name": "LayoutModel", - "justify_items": null, - "grid_row": null, - "max_height": null, - "align_content": null, - "visibility": null, - "align_self": null, - "height": null, - "min_height": null, - "padding": null, - "grid_auto_rows": null, - "grid_gap": null, - "max_width": null, - "order": null, - "_view_module_version": "1.2.0", - "grid_template_areas": null, - "object_position": null, - "object_fit": null, - "grid_auto_columns": null, - "margin": null, - "display": null, - "left": null - } - } - } - } - }, - "cells": [ - { - "cell_type": "markdown", - "metadata": { - "id": "view-in-github", - "colab_type": "text" - }, - "source": [ - "\"Open" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "zn5lhb2v_YZC" - }, - "source": [ - "#Wikipedia Simplifier Assignment 4 (Sentiment classification using tesorflow)" - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "FNiDkDht-Ulx" - }, - "source": [ - "\n", - "\n", - "---\n", - "\n", - "\n", - "**Team 1:
**\n", - "Vaibhav goyal
\n", - "Alisha
\n", - "kajal deep\n", - "\n", - "\n", - "---\n", - "\n", - "\n", - "\n", - "Mentors:
\n", - "Ananya Gupta
\n", - "Rishabh dugaye\n", - "\n", - "\n", - "---\n", - "\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "id": "Mep1UYlG0vme" - }, - "source": [ - "# importing neccesary libraries\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "import tensorflow as tf\n", - "import tensorflow_datasets as tfds\n" - ], - "execution_count": 1, - "outputs": [] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 405, - "referenced_widgets": [ - "4ad78aefbff84edebff75c2bf2c74d40", - "2da75a9b724f4f28a46f8170bdc32531", - "884bf4a023e8484bbbeaa70c20b4ec42", - "ae83be5c7169469b909990be9addcc57", - "5fb1868bcd8b443b988f998b196f1d66", - "7c2e054365bb46cf92c9ddf08788f0ad", - "22959b1ad40c4aa2b37bb88d2aa76d13", - "706074f3515b45bdb8a5f7cf0f70d3a8", - "ada8db9b412641d3bd1687721f594b87", - "af55377115fe4861b982cadddfffbe81", - "27fe16e87a674938b1201cfda3923287", - "29f862ac257d4f28a959f6345dbfa3cd", - "d9e3d2ad1c4c425898332157a800dbb3", - "1de46342cae247908351d7dd4add92bd", - "1d52bab99f23409f98630325d9efa14d", - "905b14471b3643b58ba3830fa174f86c", - "09300165c1894bd3a7b78c87fb250511", - "0149d8bb372343d283bb1829f96e28f4", - "4a26c23cc0824442ac0dcdd609992beb", - "c007626f2c5e421abc2a4887a461e037", - "0c816db6f8b34683b5aef0ac56f66072", - "e0cacdf1eb5a44b78a4f66335be30317", - "1d126277a2c44684a376962bbdfb56f6", - "a02d50a7ae384886a967182e6981c44c", - "1d361f1e76bc4c998b224c8416fc8a16", - "5b1315b6fd254acb94b58f87982f24aa", - "6cdcb40c2f5c46b28729a3520acb20a1", - "ee43cf31a9f345eca70cc73227c5a169", - "6badcfd59faa48a793e71e27098d8f3d", - "c2ceb26415204c9a852c94a0fcb53383", - "089001ea663c4789b2fe323feb01d247", - "b0a27c6b82694ff992e8e97bfa984478", - "a9c4cc19d9dc4d40a3b17631d7448daa", - "f918a91dea72484e97e82e36d01932af", - "c6bd265dcadc4e9bb12b8c58a5db57ec", - "305d41b438fb449b939b51cf6690c046", - "f8d5981f5ea94bcc87dc57538e5bce29", - "df89b9803f144e3890eddf8e16c7db0e", - "68387809e9794b8892a6db66d5371555", - "72e10b7692aa4b39b6c620496ff7cefb", - "5b4f922e27d5488bafed266090322917", - "84d2f5ac34444b40a4d7a2bb2a952078", - "5c8868e32d1a42269361c8e628870217", - "9c8858a60d48487aab7c5b6a6ae61187", - "11d33dcc576b4c2199d3f9b0cac7f73d", - "6afb891fccf24421bce1978a7fc244c1", - "ff0c7ee1bdc2406797f806967f179b44", - "eee3640504d847a4b820e8008c02aa41", - "c40037b21be849b1802af03736a9e284", - "5c33ea7a8a0b4f749ce3d454c51ac1ed", - "ac68b1dacf324d7593e74366a0e7a67c", - "c3970b6ca46e4a39b0916698e1f2047e", - "4fecb9db954b4e1682ac498bc1d86156", - "db67c3ee1b4540d4973df43d2aa3f5ab", - "dcf12272a1ef4748b7e0da3913c64cb3", - "f9096cc21ab840a08c2a2a381e2ff40b", - "dda52d9f72e4459daf5aa8ae0a591b2a", - "613af5ad3b1743a0a6602ac6f24b72c1", - "1aba18bef74746fb8d1d0b10977a4407", - "24a2bb51bebf4be191eb3705fd59a669", - "fe86ad0ecefd414da364b9b1155c0dd3", - "3f1a46aaa8cd471189b401814aedf104", - "7227ac721a2942cfbbc0282f0ac100da", - "f744483bfb4841a392b6255d8f48c763" - ] - }, - "id": "5_-UaYTr1Iwk", - "outputId": "721399c7-472f-4b52-a65c-d6d5ac21f43f" - }, - "source": [ - "dataset, info = tfds.load('imdb_reviews', with_info=True, as_supervised=True) # getting the dataset preprocessed using tfds\n", - "train_dataset, test_dataset = dataset['train'], dataset['test']\n", - "train_dataset.element_spec" - ], - "execution_count": 2, - "outputs": [ - { - "output_type": "stream", - "text": [ - "\u001b[1mDownloading and preparing dataset imdb_reviews/plain_text/1.0.0 (download: 80.23 MiB, generated: Unknown size, total: 80.23 MiB) to 
/root/tensorflow_datasets/imdb_reviews/plain_text/1.0.0...\u001b[0m\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "4ad78aefbff84edebff75c2bf2c74d40", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=1.0, bar_style='info', description='Dl Completed...', max=1.0, style=Progre…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "ada8db9b412641d3bd1687721f594b87", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=1.0, bar_style='info', description='Dl Size...', max=1.0, style=ProgressSty…" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\n", - "\n", - "\n", - "\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "09300165c1894bd3a7b78c87fb250511", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=1.0, bar_style='info', max=1.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\rShuffling and writing examples to /root/tensorflow_datasets/imdb_reviews/plain_text/1.0.0.incompleteCBJG8M/imdb_reviews-train.tfrecord\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "1d361f1e76bc4c998b224c8416fc8a16", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=25000.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\r" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "a9c4cc19d9dc4d40a3b17631d7448daa", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=1.0, bar_style='info', max=1.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\rShuffling and writing examples to /root/tensorflow_datasets/imdb_reviews/plain_text/1.0.0.incompleteCBJG8M/imdb_reviews-test.tfrecord\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "5b4f922e27d5488bafed266090322917", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=25000.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\r" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - "application/vnd.jupyter.widget-view+json": { - "model_id": "c40037b21be849b1802af03736a9e284", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=1.0, bar_style='info', max=1.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "\rShuffling and writing examples to /root/tensorflow_datasets/imdb_reviews/plain_text/1.0.0.incompleteCBJG8M/imdb_reviews-unsupervised.tfrecord\n" - ], - "name": "stdout" - }, - { - "output_type": "display_data", - "data": { - 
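
The cell above loads the IMDB reviews dataset with tfds.load(..., with_info=True) but never uses the returned info object. As a rough sketch only, assuming the info variable from that cell is still in scope, it can be used to confirm the split sizes and feature types:

    # Sketch, not part of the original notebook: inspect the DatasetInfo
    # object returned by tfds.load in the cell above.
    print(info.name)                          # 'imdb_reviews'
    print(info.splits['train'].num_examples)  # size of the train split (25,000 reviews)
    print(info.splits['test'].num_examples)   # size of the test split (25,000 reviews)
    print(info.features)                      # text feature and binary label
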
"application/vnd.jupyter.widget-view+json": { - "model_id": "dda52d9f72e4459daf5aa8ae0a591b2a", - "version_minor": 0, - "version_major": 2 - }, - "text/plain": [ - "HBox(children=(FloatProgress(value=0.0, max=50000.0), HTML(value='')))" - ] - }, - "metadata": { - "tags": [] - } - }, - { - "output_type": "stream", - "text": [ - "WARNING:absl:Dataset is using deprecated text encoder API which will be removed soon. Please use the plain_text version of the dataset and migrate to `tensorflow_text`.\n" - ], - "name": "stderr" - }, - { - "output_type": "stream", - "text": [ - "\u001b[1mDataset imdb_reviews downloaded and prepared to /root/tensorflow_datasets/imdb_reviews/plain_text/1.0.0. Subsequent calls will reuse this data.\u001b[0m\n", - "\r" - ], - "name": "stdout" - }, - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(TensorSpec(shape=(), dtype=tf.string, name=None),\n", - " TensorSpec(shape=(), dtype=tf.int64, name=None))" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 2 - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "6juiBOxe1Mpp", - "outputId": "a8c6ec18-5ee1-4483-8176-823ef376c944" - }, - "source": [ - "for example, label in train_dataset.take(1): # printing an example entry of dataset\n", - " print('Text: ', example.numpy())\n", - " print('Label: ', label.numpy())\n", - "\n", - "# shuffling the train dataset\n", - "\n", - "BUFFER_SIZE = 10000 \n", - "BATCH_SIZE = 64\n", - "\n", - "train_dataset = train_dataset.shuffle(BUFFER_SIZE).batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)\n", - "test_dataset = test_dataset.batch(BATCH_SIZE).prefetch(tf.data.AUTOTUNE)\n", - "\n", - "for example, label in train_dataset.take(1): # dataset after shuffling and coverted into batches\n", - " print('Text: ', example.numpy()[0:3])\n", - " print()\n", - " print('Label: ', label.numpy()[0:3])" - ], - "execution_count": 3, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Text: b\"This was an absolutely terrible movie. Don't be lured in by Christopher Walken or Michael Ironside. Both are great actors, but this must simply be their worst role in history. Even their great acting could not redeem this movie's ridiculous storyline. This movie is an early nineties US propaganda piece. The most pathetic scenes were those when the Columbian rebels were making their cases for revolutions. Maria Conchita Alonso appeared phony, and her pseudo-love affair with Walken was nothing but a pathetic emotional plug in a movie that was devoid of any real meaning. I am disappointed that there are movies like this, ruining actor's like Christopher Walken's good name. I could barely sit through it.\"\n", - "Label: 0\n", - "Text: [b'I am not going to spoil the contents to anyone, who has not yet watched this humble masterpiece by Kay Pollak.

A world famous conductor brilliantly played by Michael Nyqvist seeks peace from stress by moving back to his childhood village. The villagers, who has followed the genius in silence, are slowly tempting him to share of his greatness.

Each role in this movie, has a very specific purpose and shows a remarkable potential in each of the actors playing their own chord in short but precise words, a symphony of love.

Not love in the sense of relationship, but in the tone of the spirit deeply buried within each of the characters, each revealing their own present story, their needs, their skeletons, desires and much more.

I shall not forget to mention, the two main parts played by Frida Hallgren and Michael Nyqvist, whose dramas are played in unforgettable harmonies of emotional feedback. They touch each other with a pain connected in their own disability to love themselves.

Michael Nyqvist is really put to the test here in a very difficult setup, in one of those movies that either end up as catastrophic or fantastic. And fantastic it became from start to end, not one second less or more than enough, you are left with a feeling of change and a taste for more.

To this day, definitely one of the best movies I have had the pleasure of watching.'\n", - " b\"It seems that several of the people who have reviewed this movie only watched it in the first place because it was filmed near where they live. How's that for a ringing endorsement? If this movie was filmed near where I lived I wouldn't be mentioning it in my review. It is horrid! Several reviews state that this film is a spoof or tongue-in-cheek horror movie, it is neither. It is sad to see this film reviewed as a comedy as that makes it not only a bad attempt at a horror film but as a comedy as well. I did laugh though, at how unbelievably bad the film was.

This movie has 2 good things going for it, the mask and the weapon of choice, unfortunately it would have been more interesting watching an hour and a half of the mask and weapon laying on a table then watching this garbage. The social commentary behind the film is also laughable, juvenile and stupid. Don't bother with this movie, you've already wasted time reading this review don't waste anymore on this movie. Arrggghhh! It's infuriating that movies like this even get made. I was expecting the entire cast a crew to be credited to Alan Smithee, a name used when a person, usually a director, doesn't want to be credited with a movie because it's so bad.

There is nothing redeeming in this movie, I spent $1.19 on the rental and feel I was ripped off. Avoid. 1 out of 10\"\n", - " b\"When I first saw this show, I thought it looked interesting. I watched it, saw how it revolved around Sarah, like the character sees the world...revolving around her. I got it, but wasn't laughing very much.

Onstage and in her show, she's racist, crude, insensitive and hugely self-centered. I didn't get her at first, and took it all at face value. Then I got to see her movie, Jesus is Magic. I think that served as a Sarah Silverman primer for me, explaining to me just what 'language' she's speaking. She's like Marilyn Manson, working so hard to give us a faceful of horrible ideas and images, but you eventually realise it isn't an assault, it's a statement. And once you understand that, you find you're glad someone's finally giving it to you straight.

I don't mean to suggest only smart people will understand, or that to hate this show is to prove your idiocy. While I like a lot of 'smart' shows, I still to this day do not see the humor of Curb Your Enthusiasm. I get the impression that it's good, but I just don't get it. Many people will never get the Sarah Silverman Program, but I'm glad I eventually came around.

The creators of this show do work hard, every episode is loaded not just with dialogue and plot, but with songs, or dream sequences, production numbers. These people aren't putting together something to fill a time slot and please advertisers, they appear to be on a mission to make the best show they can put together. If I was to predict the future of this show, I'd say it will go the way of Arrested Development and Freaks & Geeks. It will get canceled before it's time and live on in fans' hearts and on DVD. But take heart, SSP creators, your audience IS out there, and we'll be watching for as long as they let you make the show.\"]\n", - "\n", - "Label: [1 0 1]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "dHbvi8Cq4y_j", - "outputId": "d0c4dc8b-e56c-475a-d9eb-67e4cd8165f5" - }, - "source": [ - "# making encoder to tokenize the data and add padding\n", - "\n", - "VOCAB_SIZE = 1000\n", - "encoder = tf.keras.layers.experimental.preprocessing.TextVectorization(max_tokens=VOCAB_SIZE)\n", - "encoder.adapt(train_dataset.map(lambda text, label:text))\n", - "vocab = np.array(encoder.get_vocabulary())\n", - "print(vocab[0:20])\n", - "\n", - "encoded_example = encoder(example)[0:3].numpy() #printing a sample encoding example\n", - "print(encoded_example)" - ], - "execution_count": 4, - "outputs": [ - { - "output_type": "stream", - "text": [ - "['' '[UNK]' 'the' 'and' 'a' 'of' 'to' 'is' 'in' 'it' 'i' 'this' 'that'\n", - " 'br' 'was' 'as' 'for' 'with' 'movie' 'but']\n", - "[[ 10 237 22 ... 0 0 0]\n", - " [ 9 181 12 ... 0 0 0]\n", - " [ 51 10 86 ... 0 0 0]]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "5rTgsZt28hRB", - "outputId": "5d4cf252-09d3-43a3-908f-cdb01ceff5ff" - }, - "source": [ - "# tokenization removes punctuation and some words that are not as frequent.\n", - "for n in range(3):\n", - " print(\"Original: \", example[n].numpy())\n", - " print(\"Round-trip: \", \" \".join(vocab[encoded_example[n]]))\n", - " print()" - ], - "execution_count": 5, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Original: b'I am not going to spoil the contents to anyone, who has not yet watched this humble masterpiece by Kay Pollak.

A world famous conductor brilliantly played by Michael Nyqvist seeks peace from stress by moving back to his childhood village. The villagers, who has followed the genius in silence, are slowly tempting him to share of his greatness.

Each role in this movie, has a very specific purpose and shows a remarkable potential in each of the actors playing their own chord in short but precise words, a symphony of love.

Not love in the sense of relationship, but in the tone of the spirit deeply buried within each of the characters, each revealing their own present story, their needs, their skeletons, desires and much more.

I shall not forget to mention, the two main parts played by Frida Hallgren and Michael Nyqvist, whose dramas are played in unforgettable harmonies of emotional feedback. They touch each other with a pain connected in their own disability to love themselves.

Michael Nyqvist is really put to the test here in a very difficult setup, in one of those movies that either end up as catastrophic or fantastic. And fantastic it became from start to end, not one second less or more than enough, you are left with a feeling of change and a taste for more.

To this day, definitely one of the best movies I have had the pleasure of watching.'\n", - "Round-trip: i am not going to [UNK] the [UNK] to anyone who has not yet watched this [UNK] [UNK] by [UNK] [UNK] br a world famous [UNK] [UNK] played by michael [UNK] [UNK] [UNK] from [UNK] by moving back to his [UNK] [UNK] the [UNK] who has [UNK] the [UNK] in [UNK] are [UNK] [UNK] him to [UNK] of his [UNK] br each role in this movie has a very [UNK] [UNK] and shows a [UNK] potential in each of the actors playing their own [UNK] in short but [UNK] words a [UNK] of [UNK] br not love in the sense of relationship but in the [UNK] of the [UNK] [UNK] [UNK] within each of the characters each [UNK] their own present story their needs their [UNK] [UNK] and much [UNK] br i [UNK] not forget to mention the two main parts played by [UNK] [UNK] and michael [UNK] whose [UNK] are played in [UNK] [UNK] of emotional [UNK] they [UNK] each other with a [UNK] [UNK] in their own [UNK] to love [UNK] br michael [UNK] is really put to the [UNK] here in a very difficult [UNK] in one of those movies that either end up as [UNK] or fantastic and fantastic it became from start to end not one second less or more than enough you are left with a feeling of change and a [UNK] for [UNK] br to this day definitely one of the best movies i have had the [UNK] of watching \n", - "\n", - "Original: b\"It seems that several of the people who have reviewed this movie only watched it in the first place because it was filmed near where they live. How's that for a ringing endorsement? If this movie was filmed near where I lived I wouldn't be mentioning it in my review. It is horrid! Several reviews state that this film is a spoof or tongue-in-cheek horror movie, it is neither. It is sad to see this film reviewed as a comedy as that makes it not only a bad attempt at a horror film but as a comedy as well. I did laugh though, at how unbelievably bad the film was.

This movie has 2 good things going for it, the mask and the weapon of choice, unfortunately it would have been more interesting watching an hour and a half of the mask and weapon laying on a table then watching this garbage. The social commentary behind the film is also laughable, juvenile and stupid. Don't bother with this movie, you've already wasted time reading this review don't waste anymore on this movie. Arrggghhh! It's infuriating that movies like this even get made. I was expecting the entire cast a crew to be credited to Alan Smithee, a name used when a person, usually a director, doesn't want to be credited with a movie because it's so bad.

There is nothing redeeming in this movie, I spent $1.19 on the rental and feel I was ripped off. Avoid. 1 out of 10\"\n", - "Round-trip: it seems that several of the people who have [UNK] this movie only watched it in the first place because it was filmed near where they live [UNK] that for a [UNK] [UNK] if this movie was filmed near where i [UNK] i wouldnt be [UNK] it in my review it is [UNK] several reviews [UNK] that this film is a [UNK] or [UNK] horror movie it is [UNK] it is sad to see this film [UNK] as a comedy as that makes it not only a bad attempt at a horror film but as a comedy as well i did laugh though at how [UNK] bad the film [UNK] br this movie has 2 good things going for it the [UNK] and the [UNK] of [UNK] unfortunately it would have been more interesting watching an hour and a half of the [UNK] and [UNK] [UNK] on a [UNK] then watching this [UNK] the [UNK] [UNK] behind the film is also [UNK] [UNK] and stupid dont [UNK] with this movie youve already [UNK] time reading this review dont waste [UNK] on this movie [UNK] its [UNK] that movies like this even get made i was expecting the entire cast a [UNK] to be [UNK] to [UNK] [UNK] a name used when a person usually a director doesnt want to be [UNK] with a movie because its so [UNK] br there is nothing [UNK] in this movie i [UNK] [UNK] on the [UNK] and feel i was [UNK] off avoid 1 out of 10 \n", - "\n", - "Original: b\"When I first saw this show, I thought it looked interesting. I watched it, saw how it revolved around Sarah, like the character sees the world...revolving around her. I got it, but wasn't laughing very much.

Onstage and in her show, she's racist, crude, insensitive and hugely self-centered. I didn't get her at first, and took it all at face value. Then I got to see her movie, Jesus is Magic. I think that served as a Sarah Silverman primer for me, explaining to me just what 'language' she's speaking. She's like Marilyn Manson, working so hard to give us a faceful of horrible ideas and images, but you eventually realise it isn't an assault, it's a statement. And once you understand that, you find you're glad someone's finally giving it to you straight.

I don't mean to suggest only smart people will understand, or that to hate this show is to prove your idiocy. While I like a lot of 'smart' shows, I still to this day do not see the humor of Curb Your Enthusiasm. I get the impression that it's good, but I just don't get it. Many people will never get the Sarah Silverman Program, but I'm glad I eventually came around.

The creators of this show do work hard, every episode is loaded not just with dialogue and plot, but with songs, or dream sequences, production numbers. These people aren't putting together something to fill a time slot and please advertisers, they appear to be on a mission to make the best show they can put together. If I was to predict the future of this show, I'd say it will go the way of Arrested Development and Freaks & Geeks. It will get canceled before it's time and live on in fans' hearts and on DVD. But take heart, SSP creators, your audience IS out there, and we'll be watching for as long as they let you make the show.\"\n", - "Round-trip: when i first saw this show i thought it looked interesting i watched it saw how it [UNK] around [UNK] like the character [UNK] the [UNK] around her i got it but wasnt [UNK] very [UNK] br [UNK] and in her show shes [UNK] [UNK] [UNK] and [UNK] [UNK] i didnt get her at first and took it all at face [UNK] then i got to see her movie [UNK] is [UNK] i think that [UNK] as a [UNK] [UNK] [UNK] for me [UNK] to me just what [UNK] shes [UNK] shes like [UNK] [UNK] working so hard to give us a [UNK] of horrible ideas and [UNK] but you eventually [UNK] it isnt an [UNK] its a [UNK] and once you understand that you find youre [UNK] [UNK] finally giving it to you [UNK] br i dont mean to [UNK] only [UNK] people will understand or that to hate this show is to [UNK] your [UNK] while i like a lot of [UNK] shows i still to this day do not see the humor of [UNK] your [UNK] i get the [UNK] that its good but i just dont get it many people will never get the [UNK] [UNK] [UNK] but im [UNK] i eventually came [UNK] br the [UNK] of this show do work hard every episode is [UNK] not just with dialogue and plot but with songs or dream sequences production [UNK] these people arent [UNK] together something to [UNK] a time [UNK] and please [UNK] they appear to be on a [UNK] to make the best show they can put together if i was to [UNK] the future of this show id say it will go the way of [UNK] development and [UNK] [UNK] it will get [UNK] before its time and live on in fans [UNK] and on dvd but take heart [UNK] [UNK] your audience is out there and well be watching for as long as they let you make the show \n", - "\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "mD1uVs1fDksV" - }, - "source": [ - "#Building the model\n" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "G1gPfcBg9tWR", - "outputId": "31d74b19-1704-4129-dd34-b7f2805c8b1a" - }, - "source": [ - "# defining the RNN model using Sequential()\n", - "\n", - "model = tf.keras.Sequential([\n", - " encoder,\n", - " tf.keras.layers.Embedding(\n", - " input_dim=len(encoder.get_vocabulary()),\n", - " output_dim=64,\n", - " mask_zero=True \n", - " ),\n", - " tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64)),\n", - " tf.keras.layers.Dense(64, activation='relu'),\n", - " tf.keras.layers.Dense(1)\n", - "])\n", - "\n", - "# passing an sample for checking the masking effect .\n", - "sample_text = ('This movie was cool. I really liked that movie. 
The animations and graphics are out of the world i definitely recommend that movie')\n", - "\n", - "predictions = model.predict(np.array([sample_text]))\n", - "print(predictions)\n", - "\n", - "padding = 'the'*2000\n", - "predictions = model.predict(np.array([sample_text, padding]))\n", - "print(predictions)" - ], - "execution_count": 6, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[[-0.00897167]]\n", - "[[-0.00897167]\n", - " [ 0.00113963]]\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "zTpc_jKq_t2-", - "outputId": "a57a35fd-bcc2-4e66-ffed-7b25b7be772f" - }, - "source": [ - "# comling model by defining loss function and optimier function\n", - "\n", - "model.compile(\n", - " loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),\n", - " optimizer=tf.keras.optimizers.Adam(1e-4),\n", - " metrics=['accuracy']\n", - ")\n", - "\n", - "model.summary()" - ], - "execution_count": 7, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Model: \"sequential\"\n", - "_________________________________________________________________\n", - "Layer (type) Output Shape Param # \n", - "=================================================================\n", - "text_vectorization (TextVect (None, None) 0 \n", - "_________________________________________________________________\n", - "embedding (Embedding) (None, None, 64) 64000 \n", - "_________________________________________________________________\n", - "bidirectional (Bidirectional (None, 128) 66048 \n", - "_________________________________________________________________\n", - "dense (Dense) (None, 64) 8256 \n", - "_________________________________________________________________\n", - "dense_1 (Dense) (None, 1) 65 \n", - "=================================================================\n", - "Total params: 138,369\n", - "Trainable params: 138,369\n", - "Non-trainable params: 0\n", - "_________________________________________________________________\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "yKeMOUvjDZcs" - }, - "source": [ - "#87.2% accuracy on train data" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "feTwn9LDA0eJ", - "outputId": "ab41785f-ef4a-4d90-a393-e7a5d570fdd8" - }, - "source": [ - "# training the model on training_dataset\n", - "\n", - "history = model.fit(train_dataset, epochs=10, validation_data=test_dataset, validation_steps=30)" - ], - "execution_count": 8, - "outputs": [ - { - "output_type": "stream", - "text": [ - "Epoch 1/10\n", - "391/391 [==============================] - 38s 82ms/step - loss: 0.6820 - accuracy: 0.5135 - val_loss: 0.4819 - val_accuracy: 0.7474\n", - "Epoch 2/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.4431 - accuracy: 0.7900 - val_loss: 0.4110 - val_accuracy: 0.8005\n", - "Epoch 3/10\n", - "391/391 [==============================] - 30s 76ms/step - loss: 0.3653 - accuracy: 0.8471 - val_loss: 0.3587 - val_accuracy: 0.8266\n", - "Epoch 4/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.3419 - accuracy: 0.8529 - val_loss: 0.3382 - val_accuracy: 0.8594\n", - "Epoch 5/10\n", - "391/391 [==============================] - 31s 77ms/step - loss: 0.3257 - accuracy: 0.8624 - val_loss: 0.3308 - val_accuracy: 0.8453\n", - "Epoch 6/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.3057 - 
accuracy: 0.8680 - val_loss: 0.3256 - val_accuracy: 0.8557\n", - "Epoch 7/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.3014 - accuracy: 0.8719 - val_loss: 0.3215 - val_accuracy: 0.8547\n", - "Epoch 8/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.3022 - accuracy: 0.8717 - val_loss: 0.3217 - val_accuracy: 0.8589\n", - "Epoch 9/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.3001 - accuracy: 0.8734 - val_loss: 0.3200 - val_accuracy: 0.8646\n", - "Epoch 10/10\n", - "391/391 [==============================] - 30s 77ms/step - loss: 0.2981 - accuracy: 0.8722 - val_loss: 0.3217 - val_accuracy: 0.8620\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "markdown", - "metadata": { - "id": "VCkwrjYHDSyz" - }, - "source": [ - "#86.4% accuracy on test data" - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "uWZ_2b6wDovP", - "outputId": "619b319e-d413-4297-e876-0d3ba17d7be1" - }, - "source": [ - "# checking loss on test_dataset\n", - "\n", - "test_loss, test_acc = model.evaluate(test_dataset)\n", - "print(\"Test loss: \", test_loss)\n", - "print(\"Test accuracy: \", test_acc)" - ], - "execution_count": 9, - "outputs": [ - { - "output_type": "stream", - "text": [ - "391/391 [==============================] - 15s 38ms/step - loss: 0.3193 - accuracy: 0.8642\n", - "Test loss: 0.3192571699619293\n", - "Test accuracy: 0.8641600012779236\n" - ], - "name": "stdout" - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/", - "height": 515 - }, - "id": "G0QS7VxlCep3", - "outputId": "768a1357-4c25-474b-e110-2e468ba367bd" - }, - "source": [ - "# plotting the graphs \n", - "\n", - "def plot_graph(history, metric):\n", - " plt.plot(history.history[metric])\n", - " plt.plot(history.history['val_'+metric], '')\n", - " plt.xlabel(\"Epochs\")\n", - " plt.ylabel(metric)\n", - " plt.legend([metric, 'validation_'+metric])\n", - "\n", - "plt.figure(figsize=(16, 8))\n", - "plt.subplot(1, 2, 1)\n", - "plot_graph(history, 'accuracy')\n", - "plt.subplot(1, 2, 2)\n", - "plot_graph(history, 'loss')\n", - "plt.ylim(0, None)" - ], - "execution_count": 17, - "outputs": [ - { - "output_type": "execute_result", - "data": { - "text/plain": [ - "(0.0, 0.6578917190432548)" - ] - }, - "metadata": { - "tags": [] - }, - "execution_count": 17 - }, - { - "output_type": "display_data", - "data": { - "image/png": 
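
After training, the model defined above ends in a Dense(1) layer, so model.predict returns a raw logit rather than a probability. A minimal usage sketch, assuming the model, np, and tf objects from the cells above and a made-up review string, converts that logit into a positive-sentiment probability with a sigmoid:

    # Sketch, not part of the original notebook: score a new review with the
    # trained model. The review text here is invented for illustration.
    new_review = np.array(['The plot dragged and the acting felt flat.'])
    logit = model.predict(new_review)           # shape (1, 1), raw logit from Dense(1)
    prob_positive = tf.sigmoid(logit).numpy()   # sigmoid maps the logit to P(label == 1)
    print(prob_positive)                        # near 1.0 means positive, near 0.0 means negative
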
"iVBORw0KGgoAAAANSUhEUgAAA7YAAAHgCAYAAACRsvFbAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdd3xV9f3H8dc3e5MNQiBh771UVOJeDBWVpYwKtlalttb+aKutq61Va22r1SJuBURcqAgqgjgQGRJk771CEgjZyc3398dJIMEAgdybe5O8n4/HfeTee875nk/QB5f3/Z7z/RhrLSIiIiIiIiJ1lZ+3CxARERERERGpCQVbERERERERqdMUbEVERERERKROU7AVERERERGROk3BVkREREREROo0BVsRERERERGp0wK8XYC7xMfH25SUFG+XISIi9cTy5csPWWsTvF1HXabPZhERcadTfTbXm2CbkpLCsmXLvF2GiIjUE8aYHd6uoa7TZ7OIiLjTqT6bdSmyiIiIiIiI1GkKtiIiIiIiIlKnKdiKiIiIiIhInVZv7rEVERERERGpSnFxMbt376agoMDbpUg1hISEkJSURGBgYLWPUbAVEREREZF6bffu3URGRpKSkoIxxtvlyClYa8nIyGD37t20bNmy2sfpUmQREREREanXCgoKiIuLU6itA4wxxMXFnfHsuoKtiIiIiIjUewq1dcfZ/LdSsBUREREREfGwiIgIb5dQrynYioiIiM8qLbW88d0OPl97wNuliIiID1OwFREREZ/l52d4+ZttvLp4u7dLERFxC2st9913H126dKFr16689dZbAOzbt4+LLrqIHj160KVLF7766itcLhfjxo07tu8///lPL1fvu7QqsoiIiPi01PaJvL54B3lFJYQF6Z8uIlIzD324hrV7s906ZqemUfx5cOdq7fvuu++ycuVK0tLSOHToEH379uWiiy5i2rRpXHnllfzxj3/E5XKRl5fHypUr2bNnD6tXrwbg8OHDbq27PtGMrYiIiPi01PYJFLlKWbwlw9uliIjU2Ndff83IkSPx9/encePGDBw4kKVLl9K3b19efvllHnzwQX788UciIyNp1aoVW7du5e6772bu3LlERUV5u3yfpa89RURExKf1axlLaKA/Czekc2nHxt4uR0TquOrOrNa2iy66iEWLFvHxxx8zbtw4fvOb3zBmzBjS0tKYN28ezz//PDNnzuSll17ydqk+STO2IiIi4tOCA/w5v3UcCzYcxFrr7XJERGrkwgsv5K233sLlcpGens6iRYvo168fO3bsoHHjxkycOJEJEyawYsUKDh06RGlpKcOGDePRRx9lxYoV3i7fZ2nGVkRERHxeaodE5q8/yJb0XNokqmWGiNRd119/PYsXL6Z79+4YY3j88cdp0qQJr776Kk888QSBgYFERETw2muvsWfPHsaPH09paSkAf/vb37xcve9SsBURERGfl9ouAYCFGw4q2IpInZSTkwOAMYYnnniCJ554otL2sWPHMnbs2J8cp1na6tGlyCIiUm/kFZVwtKDY22WIBzSPDaN1Qjhfbkz3dikiIuKDNGMrIiI+yVpLbpGLjJxCMnKLyMwpIjO3yHmeW/Ze2SMjp4iM3EIKikv52YCW/GlwJ2+XLx6gtj8iInIy+lQQEakFpaWWrLwiDmQXcuBoAenZhRzILuDA0QIOZhdy8GghFogMDiA82J/w4ICy584jMiSA8KAKz4MDiCjbLyLY2ebnZ7z9a56StZbsgpKyMFpYFkaPB9OfhNXcIopKSqscKzjAj/iIYGLDg4gND6JNQoTzPCKIns1javk3k9qS2j6BF7/exuItGVodWUREKlGwFRGpgfLAevCoE1SdkFrgBNjsAg4eLeRg2c+S0p+u5hoTFkjjqBASIoMxxpBTUEz60UJyCkvIKSwht7CkyuOqEhbkT0R50K3005+I8jAcFHD8eYX9IsoCdWRwIOHB/gT4n/5OldJSy5H84gph9PjMakbFmdWymdasvCKKXVX/LmFB/sSGBxEXHkTjqBA6nhNFXFlojQ0PIi4iiNjw4GPvhQX5Y4xvB3lxP7X9ERGRk1GwFRGpgrWWrLziSiE1vSy8Hg+sToitKqxFhwXSODKExKhgWifE0zgqmMTIYBpHhZAYFULjqGASIoMJDvA/bR2FJaXHQm5OYQk5BSXkFpWQU+hynheWcLRs+4nPd2flkVtUQm7ZvkWuqmdATxQc4HdsZji8LAxHBAeQX+Qqm00tJCuvGNdJQndkcACxEU4ITYoJpVtSI+IigiuH1fBgYiOcMBsSeOo/BxFw2v4MaHO87Y++3BARkXIKtiLSoFhrOZxXfOwS4IqzquWXCR/MLiT9aGGVIbBRaGBZSA2hf6twGkeF0Dgy+FhYTYx0Zl/dFdSMMYQE+hMS6E98RHCNxysqKT0ekKsIwkcLnBCcW1T+/Pg+B7ILCA30JzkujF7J0WUB1QmrcRHHw2pMeOBpA7vI2RrYPpHP16ntj4iIVKZgKyJ1SsUZzJwCJ5wdLSgPasXkFDghrHxb+euMnEIOnCKwRoUEOCE1KoT+LcMrBdXGUcHHLheu6zOLQQF+BAUEERMe5O1SRM6K2v6IiEhVFGxFpFaUllryil1lgbP42Mxg+fNKQbWKYFoeWnMKS056n2ZF/n6GyJDj94/GRQTRr2UsiVHBxy4RdmZbned1PbCKNBQV2/5MuLCVt8sREfGIiIgIcnJy2Lt3L5MmTWLWrFk/2Sc1NZUnn3ySPn36nHScp59+mttvv52wsDAArrnmGqZNm0Z0dLRb6hw3bhyDBg3ixhtvdMt4NaFgK9LAuUotRSWlFJa4yn6WUuQqpaik7OEqpbC4lCJXhe0V9iksKSWv6HhgrTSDWvFnUQm2Gmsgld/bGRF8/L7OZtGhRIZEVnqvYmiNCAkgMjiw0rbgAD/dfydST6ntj4g0FE2bNq0y1FbX008/zS233HIs2M6ZM8ddpfkcfRqI+KjcwhLSdh/mUE7R8ZBZ4qoUKKsMouUh1VXxdelPjisPpydb/OdMVQyZ5eGySVRIheBZvi3whNfH9w8PDiCwGqvxikjDprY/IlIjn0yG/T+6d8wmXeHqx066efLkyTRv3pw777wTgAcffJCAgAAWLFhAVlYWxcXFPProowwdOrTScdu3b2fQoEGsXr2a/Px8xo8fT1paGh06dCA/P//YfnfccQdLly4lPz+fG2+8kYceeoh///vf7N27l4svvpj4+HgWLFhASkoKy5YtIz4+nqeeeoqXXnoJgAkTJnDPPfewfft2rr76ai644AK+/fZbmjVrxgcffEBoaOhp/wjmz5/Pb3/7W0pKSujbty/PPfccwcHBTJ48mdmzZxMQEMAVV1zBk08+ydtvv81DDz2Ev78/jRo1YtGiRWfzp16Jgq2ID7DWsudwPst3ZB17rNuXzakypzEQ5O9HUIAfwQF+FZ77l91H6bwXFhZw7HXwCfs6+/hX2n7ieOXjBAf6Vz5fhX1CA/19voeq1JCrBDK3Os/D4yEkGv
z0JYR4R3nbnwUbDirYikidMHz4cO65555jwXbmzJnMmzePSZMmERUVxaFDhzj33HMZMmTISa84e+655wgLC2PdunWsWrWKXr16Hdv2l7/8hdjYWFwuF5deeimrVq1i0qRJPPXUUyxYsID4+PhKYy1fvpyXX36ZJUuWYK2lf//+DBw4kJiYGDZt2sT06dN54YUXuPnmm3nnnXe45ZZbTvn7FRQUMG7cOObPn0+7du0YM2YMzz33HLfeeivvvfce69evxxjD4cOHAXj44YeZN28ezZo1O/ZeTSnYinhBUUkpa/YeYfmOLFbsdILsgexCAMKD/OnZIoa7LmlLrxbRJMWEVgqf5cEywM/oUlvxjLxMOLAaDqyB/avhwI9wcD24Co/vY/whLBbCEyAszgm7YfFlP+Oc9yu+FxoDfrqPWdyjvO3Pwg3pavsjImfuFDOrntKzZ08OHjzI3r17SU9PJyYmhiZNmvDrX/+aRYsW4efnx549ezhw4ABNmjSpcoxFixYxadIkALp160a3bt2ObZs5cyZTpkyhpKSEffv2sXbt2krbT/T1119z/fXXEx4eDsANN9zAV199xZAhQ2jZsiU9evQAoHfv3mzfvv20v9+GDRto2bIl7dq1A2Ds2LE8++yz3HXXXYSEhHDbbbcxaNAgBg0aBMCAAQMYN24cN998MzfccMPp/wCrQcFWpBZk5BSyYudhJ8juyCJt92EKS5yVeZvHhnJeqzh6J8fQKzmG9o0jCdDluFIbXCWQsbksxFYIskf3Ht8nLB6adIF+E6FxZ/ALhLxDkJsOuYcgL8P5uf9H52fBSb51NX5OuA2LLwu9cRWCcLzzOjyhQhCOBX99RMnJqe2PiNQ1N910E7NmzWL//v0MHz6cN998k/T0dJYvX05gYCApKSkUFBSc8bjbtm3jySefZOnSpcTExDBu3LizGqdccPDx9oL+/v6VLnk+UwEBAXz//ffMnz+fWbNm8cwzz/DFF1/w/PPPs2TJEj7++GN69+7N8uXLiYuLO+vzgIKtiNuVllo2p+ccu6R4xY4sth7KBSDQ39ClWSNuPTf5WJBtHBXi5YqlQfjJLOxqSF8PJWUffH4BEN8eUi5wgmzjztC4K0Se4WWeruLjYTfvUIXwm17hvQw4uM55nZ8FVHXNvYHQ6ONBt9KMcFUzw3HgH1jTPyWpQ9T2R0TqmuHDhzNx4kQOHTrEl19+ycyZM0lMTCQwMJAFCxawY8eOUx5/0UUXMW3aNC655BJWr17NqlWrAMjOziY8PJxGjRpx4MABPvnkE1JTUwGIjIzk6NGjP7kU+cILL2TcuHFMnjwZay3vvfcer7/++ln/bu3bt2f79u1s3ryZNm3a8PrrrzNw4EBycnLIy8vjmmuuYcCAAbRq5axmv2XLFvr370///v355JNP2LVrl4KtiLflFpaQtsuZjV22I4sfdmaRXVACQFx4EL2SY7i5b3N6J8fQtVkjtZURz3KVQOYWZwb1wJrjYTZ7z/F9ymdh+06Axl2c5/HtICD45ONWl38gRDZxHtWtNz+ziiBcNitcHoQPbYLcxc6+9qd9iAEIaeT8bj1Hw4X31vx3EZ+mtj8iUtd07tyZo0eP0qxZM8455xxGjx7N4MGD6dq1K3369KFDhw6nPP6OO+5g/PjxdOzYkY4dO9K7d28AunfvTs+ePenQoQPNmzdnwIABx465/fbbueqqq2jatCkLFiw49n6vXr0YN24c/fr1A5zFo3r27Fmty46rEhISwssvv8xNN910bPGoX/ziF2RmZjJ06FAKCgqw1vLUU08BcN9997Fp0yastVx66aV07979rM5bkbHV6b9RB/Tp08cuW7bM22VIPXeqRZ6MgXaJkfRKjqF3cgx9kmNIjgvTvV/iOXmZx8PrqWZhj83AdnEeZzoL60tKXc4sb6UgXBZ+yy+RbnUx9B5b41MZY5Zba0/eHFBOy9OfzY98tJbXF+9g5Z8vV9sfETmldevW0bFjR2+XIWegqv9mp/ps1qeAyCkUlZSydl82y7Zn/mSRp7Agf3q2iOaui9vQOyWWHs2jaRSqSyHFA7w9C+tL/PyPX5osbmWMuQr4F+APTLXW/mR1FWPMzcCDONePp1lrR9VqkSe4uH0iL369jW83Z3BZpzr8hY2IiNSYgq00XNu+gsBQSDr+pc+pFnlKignl3FZx9NEiT+JJFWdhy2diT3YvbH2ZhRWvM8b4A88ClwO7gaXGmNnW2rUV9mkL/B4YYK3NMsYkeqfa4/q2jCEsyJ+FGw8q2IqIeNCdd97JN998U+m9X/3qV4wfP95LFf2Ugq00TKvexr47EYMlPbo7n0QM47WsLmzOcMKDFnkSjyrIhqztcHiH8zNrO2RucwLsyWZhm3R1gmx8ewgI8lLhUo/1AzZba7cCGGNmAEOBtRX2mQg8a63NArDWHqz1Kk8QHODP+a3V9kdExNOeffZZb5dwWgq20qCUllq2LJpOq4V3scJ05sOi3vwscy5jDv+JawKasKHLLYT0H0vnlGZa5EnOnqsEsncfD61ZFQJs1nZnAaSKQqIhJlmzsOJNzYBdFV7vBvqfsE87AGPMNziXKz9orZ1bO+WdnNr+iEh16QuwuuNs1oFSsJV6z1pL2u4jfJS2l4yVH/H34sdIozXTWz/OeR1bYFs8is34kvjF/yV+85Owa4qz8Ey/n0N0c2+XL77IWmcBo4phteLs6+FdYF3H9/cLgOgWEJMCTa9zfpY/opOdtjYivi8AaAukAknAImNMV2ttpebFxpjbgdsBWrRo4fGi1PZHRKojJCSEjIwM4uLiFG59nLWWjIwMQkLO7GpJBVupl6y1rNt3lI9W7eXDVXvZlZnPRQFreDHwcXJjOtBu/Ec81Sj2+AGJg6HjYNi9HL57Fhb/13l0vh7OuxOa9fLeLyPeUVIER3ZB1rYqZl53QOGRyvuHxTtBtVkf6HJjhfCaDFHNnEWPRHzXHqDiN3lJZe9VtBtYYq0tBrYZYzbiBN2lFXey1k4BpoCzKrLHKi7TPDaMNokRavsjIqeUlJTE7t27SU9P93YpUg0hISEkJSWd0TEKtlKvbD6Y44TZtL1sSc/F389wfus4HuxxlEuW/hMT05bocR9BWGzVAyT1hhtfgssegiXPw4rXYPUsaHG+E3DbX62AUq6kCLYtgvUfOrOXASHOCrwBoc7PwNCzeB1y/OHn4YW5rHVaxVScda04+3pkN87Cr2X8g52QGpMCLc796axrsGaKpE5bCrQ1xrTECbQjgBNXPH4fGAm8bIyJx7k0eWutVnkSqe0SeG3xDnILSwgP1j9tROSnAgMDadmypbfLEA/S3/5S5+3KzOPDVXv5MG0f6/ZlYwz0S4ll/ICWXN2lCXFHVsOrd0JUUxjzwclDbUXRzeHKv8DA/4Mf3oDvnoO3RkNsKzj3l9BjFASFe/6X8zUlRbB1Iax9H9Z/DAWHISgSGjWD4nwoKYSS8p8FNTuXf1DloBsQDIEhld87k9d5GScE2B1QnFv5nBFNnKCaPKDyjGtMirPN02FbxEustSXGmLuAeTj3z75kr
V1jjHkYWGatnV227QpjzFrABdxnrc3wXtXHpbZPZOrX21i8RW1/REQaKnM2N+b6Ik83gRffsv9IAR//uI8P0/aycpdze1fPFtEM7taUa7udc3wF4/2r4ZVrIaQRjP/ECWBnw1XizEx++wzsWeYs9tNnvHMfbtQ5bvqtfFRJIWxZUBZm5ziX4AY3gg7XQKeh0PqSqnulWls56BZXCLzlj+KCM3tdaZwTx63wmpP8vRYYVnmmtXy2NSbFuQc2KMxTf4pSB52qCbxUT219NheWuOj58Gfc0KsZj17X1ePnExER7zjVZ7NmbKXOOJRTyCer9/Nh2l6Wbs/EWujcNIr/u6oDg7qdQ/PYE0JJ+kZ4/TpnZnXs7LMPtQD+Ac79tp2vh13fw+Jn4Jt/OUG3yzDnMuVzutXsF/QlxQWw5QsnzG74BAqznS8HOg5ywmyr1KrDbEXGODOmgbXcJslacBX/NPiGRkN4glOXiNQravsjIiIKtuLTjuQVM2/Nfj5ctZdvt2TgKrW0SYzgnkvbMaj7ObROOMl9jZlb4bUhgIExs50ZOXdp3g+av+b0HV3yP/jhdVg1A1IuhPPvhjaX181LVovzYfPnsPYD2DAXio5CaAx0GgKdroeWF9WN/qnGOHXWhVpFxG3U9kdEpGHzaLA1xlwF/Avnfp2p1trHTtjeAngViC7bZ7K1do4xJgVYB2wo2/U7a+0vPFmr+I6cwhI+X3uAj1bt5cuN6RS7LC1iw/jFwFYM7t6U9o0jT/1t/JHd8OpQZ5Zu3McQ38Yzhca2hKsfg9TJziJTS56HaTdDXFs475fQbYTvX9palAebP3PC7MZ5UJQDobHQ5XrodJ0TZv0DvV2liMhpqe2PiEjD5rFga4zxB54FLsdpEbDUGDPbWru2wm73AzOttc8ZYzoBc4CUsm1brLU9PFWf+JaCYhdfrD/IR6v2Mn/dQQpLSjmnUQjjzk9hcPemdG3WqHqXlh3dD68OhoIjzuXHjTt5vvjQaBgwCc69wwmI3/4HPvo1zH8E+k5wHpE+tJhJUS5s+hTWvO/8LM6DsDjoeqMTZlMuUJgVkTqnvO3Pwg1q+yMi0hB5csa2H7DZWrsVwBgzAxgKVAy2Fogqe94I2OvBesTHFJWU8tWmdD5M28tnaw+QW+QiPiKIEX2bM6h7U3q3iMHP7wzuk8o9BK8NhaMHYMz70LSWvxfxD3TCYZdhsHOxc//toifgm6eh683OLG7jzrVbU7nCHNg0ryzMfubcfxqeAN1HOGE2eYBzH7GISB2mtj8iIg2XJ//WbwbsqvB6N9D/hH0eBD41xtwNhAOXVdjW0hjzA5AN3G+t/cqDtUotKXGVsnhrBh+m7WXu6v1kF5TQKDSQwd2bMrh7U/q3jCXA/yzuT83PchaKytoOo2c598F6izGQfL7zyNjitApa+SasfMNZQfi8O6H1pZ5fxKjwqHN58Zr3nHtnSwogPBF6ji4Ls+erJ6+I1Ctq+yMi0nB5++vMkcAr1tp/GGPOA143xnQB9gEtrLUZxpjewPvGmM7W2uyKBxtjbgduB2jRokVt1y7VVFpqWbo9kw9X7eWTH/eTkVtERHAAV3RqzODuTRnQJp6ggBostlR4FN64EQ6uh5EzoOWF7iu+puJaw7VPwsV/gOUvw5Ip8MYwSOjozOB2vdm9qwYXZMPGuc7M7ObPwVXo9F/tNcYJsy3OVZgVkXqrb8sYwoL8WbjxoIKtiEgD48lguwdoXuF1Utl7Fd0GXAVgrV1sjAkB4q21B4HCsveXG2O2AO2ASs3wrLVTgCng9MrzxC8hZ8daS9ruI3yYtpePV+1jf3YBIYF+XNqxMYO7NSW1fQIhgW4IWEV5MG0E7P0Bbn4N2l52+mO8ISwWLrwXzrsb1rzrXKY8+26Y/zD0nQh9b4Pw+LMbO//w8TC7ZT64iiCyqdNnt9N10Lx/3VylWUTkDKntj4hIw+XJYLsUaGuMaYkTaEcAo07YZydwKfCKMaYjEAKkG2MSgExrrcsY0wpoC2z1YK3iRgXFLm59cQlLt2cR5O/HwPYJ/KF7Ry7tkOjee55KCuGt0bDjGxg21emx6usCgpz7WrsNh22LYPGzsPCv8PVTznvn3QkJ7U8/Tn6W0192zftOv9nSYohKckJyp6GQ1FdhVkQapFS1/RERaZA8FmyttSXGmLuAeTitfF6y1q4xxjwMLLPWzgbuBV4wxvwaZyGpcdZaa4y5CHjYGFMMlAK/sNZmeqpWca+/zVnH0u1ZPDCoEzf2TqJRqAdW2HUVw9vjnFA39Fln0aa6xBhoNdB5pG+E756FtBmw4lVoe4UTcFsOrHwfbl4mbJjjhNmtC50w26g59P+5MzPbrLfCrIg0eKnt1fZHRKQhMtbWjyt4+/TpY5ctW3b6HcWj5q7ezy/eWM5tF7TkgUEearVT6oJ3bnMWRbrmSeg30TPnqW25h2DZS/D9FMhNh8ZdnftwS0ucMLvtS+d5dAsnyHa6Dpr18vwiVCINlDFmubW2j7frqMu89dl82VNf0iQqhDcmnLhmpYiI1GWn+mz29uJRUo/szsrjd7PS6JbUiP+7qoNnTlJaCh/c5YTayx+pP6EWnHtsB/4Ozp8EP77tXKb8/h3OtpgUOO8u5zLjpj0VZkVETkFtf0REGh79bS9uUewq5VczVlJq4T8je9ZsleOTsRbm3Atp0yD1DzBgkvvP4QsCQ6DXrdDzFqcfbmAYnNNdYVZEpJrU9kdEpOHRDXniFv/8bCPLd2Txtxu6khwX7v4TWAuf3u9cqjvgHmdms74r74fbtIdCrYjIGajY9kdERBoGBVupsa82pfPcl1sY0bc5g7s39cxJFvwVFj8D/X4Olz2ooCciIifltP2JP9b2R0RE6j8FW6mRg0cL+PVbK2mbGMGfB3f2zEm++gcsehx6jYGrHlOoFRGR00ptn8DurHy2pOd4uxQREakFCrZy1kpLLb95K42cwhKeGdWL0CB/95/ku+dg/sPQ9SYY9LTa2YiISLUcb/uT7uVKRESkNiglyFl77sstfL35EA8O7ky7xpHuP8Gyl2HuZOg4GK57Hvw8EJxFRKReSooJo01ihIKtiEgDoWArZ2XZ9kye+mwjg7s3ZXjf5u4/QdoM+OjX0PYKGPYS+GsBbxEROTOp7RL4flsmuYUl3i5FREQ8TMFWztjhvCImTf+BZtGh/PX6Lhh33/O65n2nf2vLC+Hm1yAgyL3ji4hIg3Bxh0SKXKUs3pLh7VJERMTDFGzljFhr+d2sVaTnFPLMqJ5EhgS69wQb58E7t0FSPxgxHQJD3Tu+iIg0GH1S1PZHRKShULCVM/Lqt9v5dO0B/u+qDnRLinbv4FsWwFu3QpOuMHomBEe4d3wREWlQytv+LFivtj8iIvWdgq1U2+o9R/jrnPVc2iGR2y5o6d7Bd3wLM0ZBXBu45V0IaeTe8UVEpEFKbZ/AnsNq+yMiUt8p2Eq15BSWcPf0H4gND+KJm7q7977a3cvhzZshqhmMeR/CYt03toiINGhq+yMi0jAo2MppWWt54P3V7MjI5V8jehAb7sbFnPat
gjeuh/A4GDsbIhLdN7aIiDR4avsjItIwKNjKac1avpv3ftjDry5tR/9Wce4bOH0DvH4dBEXAmNkQ1dR9Y4uIiJS5uL3a/oiI1HcKtnJKmw8e5U8frOG8VnHcdUkb9w2csQVeHQJ+ATD2Q4hJdt/YIiIiFaS2V9sfEZH6TsFWTqqg2MVd034gLMifp0f0wN/PTffVHt4Frw0FVxGM+QDiWrtnXBERkSqUt/1ZsEFtf0RE6isFWzmpRz9ey/r9R/nHzd1pHBXinkGz98Grg6EgG259DxI7umdcERGRkyhv+7Nwg9r+iIjUVwq2UqVPftzHG9/t5OcXtSK1vZsWdMo95MzU5qbDLe9A0x7uGVdEROQ01PZHRKR+U7CVn9iVmcfv3llF9+bR3HtFe/cMmp8Fr10Hh3fAqLegeV/3jCsiIlINavsjIlK/KdhKJcWuUu6e/gMAz6rnoAYAACAASURBVIzsSVCAG/4XKciGN4bBoQ0w4k1IuaDmY4qIiJyBpJgw2qrtj4hIvaVgK5U8OW8DK3cd5u/DutE8NqzmAxblwrThsHcl3PQKtLms5mOKiIichVS1/RERqbcUbOWYhRsO8r9FWxndvwXXdD2n5gMWF8CM0bDrOxj2AnS4tuZjioiInKXytj/fqu2PiEi9o2ArABzILuDemWl0aBLJA4M61XzAkiJ4eyxsXQBDnoEuw2o+poiISA2Ut/1ZqLY/IiL1joKt4Cq13DNjJXlFLp4Z1ZOQQP+aDViUB7PGw8a5cO0/oOdo9xQqIiJSA2r7IyJSfynYCs8u2MzirRk8NLQzbRIjazZY9l54+WpY/zFc9Rj0neCeIkVERNxAbX9EROqnAG8XIN61ZGsGT3++ket6NOWm3kk1G2z3cpgxCopyYOR0aH+1e4oUERFxk4ptf2r8Za6IiPgMzdg2YFm5RfxqxkqS48J59PquGGPOfrBVbzsztQFBcNtnCrUiIuKT1PZHRKR+UrBtoKy1/PbtNDJzi/jPyJ5EBJ/l5H1pKXz+ELw7AZL6wMQF0NgNi0+JiIh4iNr+iIjUPwq2DdRL32xn/vqD/OGaDnRp1ujsBik8Cm/dAl8/Bb3Gwq3vQ3i8ewsVERFxM7X9ERGpfxRsG6BVuw/z2CfruLxTY8aen3J2g2RthxevcFY+vvpxGPwv5zJkERERH6e2PyIi9Y8Wj2pgjhYUc/f0H0iICOaJG7ud3X2127+BmbdCaQncMgtaX+L+QkVERDwkOMCfAW2Ot/2p0RoTIiLiEzRj24BYa/nDe6vZnZXPv0f2JDrsLGZYl78Krw2B0BiY8IVCrYiI1Elq+yMiUr8o2DYgM5ft4sO0vfzm8nb0SYk9s4NdJfDJZPhwErQcCBPmQ3wbzxQqIiLiYantEwG0OrKISD2hYNtAbDxwlD/PXsMFbeK5Y2DrMzs4PwvevBGWPAfn/hJGzYTQaM8UKiIiUguaRYfSNjGCBbrPVkSkXlCwbQDyi1zcNW0FEcEBPDW8O35+Z3Av0aHNMPUy2P41DPkPXPU38Net2SIiUveltk9g6bYstf0REakHFGwbgIc/WsPGAzk8dXMPEiNDqn/g5vkw9RJnxnbsbOg1xnNFioiI1DK1/RERqT8UbOu5D9P2Mv37XdyR2pqL2iVU7yBr4bvnnMuPo5Jg4gJIPt+zhYqIiNSyPikxhKvtj4hIvaBrSuuxnRl5/P7dH+nVIprfXN6uegeVFMGce2HFa9D+WrhhCgRHeLZQERERLwgO8Od8tf0REakXNGNbTxWVlHLX9BX4Gfj3yJ4E+lfjP3XuIXj9OifUXvhbGP6GQq2IiNRravsjIlI/aMa2nnp87npW7T7C87f0Jikm7PQHHFgD00dAzkG4YSp0u8nzRYqIiHhZedufBevTaZMY6eVqRETkbGnGth76Yv0Bpn69jTHnJXNVlyanP2D9x/DiFc5lyOPnKNSKiEiDUd72Z+FG3WcrIlKXKdjWM/uO5HPvzDQ6nRPFH67peOqdrYWv/gEzRkN8O7h9ITTrXRtlioiI+Ay1/RERqfsUbOsRV6nlVzNWUlhSyjOjehIS6H/ynYvz4d2JMP9h6DLMmamNOqf2ihUREfERF6vtj4hInadgW4/8e/4mvt+WyaPXdaFVwikWfcreBy9fAz++DZc8AMOmQmBo7RUqIiLiQ/qkxKrtj4hIHafFo+qJxVsy+M8XmxjWK4kbeiWdfMc9K2DGKCjIhuFvQsdBtVekiIiIDwoK8FPbHxGROk4ztvVARk4hv5rxAynx4Tw8tPPJd1z9Drx8NfgFwm2fKtSKiIiUKW/7s/mg2v6IiNRFCrZ1XGmp5d630zicX8wzI3sRHlzFJHxpKXzxKMz6GTTtBbcvgCZdar9YERHxWcaYq4wxG4wxm40xk6vYPs4Yk26MWVn2mOCNOj2lvO3Pwg3pXq5ERETOhoJtHTf1660s3JDOA9d2pFPTqJ/uUJgDM2+FRU9Az1thzAcQHl/7hYqIiM8yxvgDzwJXA52AkcaYTlXs+pa1tkfZY2qtFulhavsjIlK3KdjWYSt3HebxuRu4uksTbjk3+ac7HN4JL10JG+bAVY/BkP9AQFDtFyoiIr6uH7DZWrvVWlsEzACGermmWndxh0S1/RERqaMUbOuo7IJi7p6+gsZRITx2Q7efLnSx8zuYcjEc3gWj34Zz7wAthiEiIlVrBuyq8Hp32XsnGmaMWWWMmWWMaV7VQMaY240xy4wxy9LT69ZlvantEtT2R0SkjlKwrYOstfz+nR/Ze7iAf4/sSaOwwMo7rHgdXhkEIY1g4nxoc5l3ChURkfrkQyDFWtsN+Ax4taqdrLVTrLV9rLV9EhISarXAmlLbHxGRukvBtg6a/v0uPv5xH7+9oj29k2OObyh1wbw/wuy7IOUCJ9TGt/VeoSIiUlfsASrOwCaVvXeMtTbDWltY9nIq0LuWaqs1J7b9ERGRukPBtg56dsFm+rWM5ecXtTr+ZsERmHYzLH4G+v8CRs+C0JiTDyIiInLcUqCtMaalMSYIGAHMrriDMeacCi+HAOtqsb5ao7Y/IiJ1UxW9YcSXFRS72Hskn5v7NMfPr+ye2YwtMH0EZG6Fwf+C3uO8WqOIiNQt1toSY8xdwDzAH3jJWrvGGPMwsMxaOxuYZIwZApQAmcA4rxXsQRXb/rRtHOnlakREpLoUbOuYPYfzsRZaxIU6b2xZAG+PAz9/GDMbUgZ4tT4REambrLVzgDknvPenCs9/D/y+tuuqbc2iQ2nX2Gn7M7HilVEiIuLTPHopcjWavbcwxiwwxvxQtsriNRW2/b7suA3GmCs9WWddsjMjD4AWMaHw/QvwxjCIagoTv1CoFRERcYPU9mr7IyJS13gs2Faz2fv9wExrbU+c+3n+W3Zsp7LXnYGrgP+Wjdfg7czMI4ASOq14EOb8FtpeAbd9CjEp3i5NRESkXlDbHxGRuseTM7bVafZugaiy542AvWXPhwIzrLWF1tptwOay8Rq8nRm5TA1+mtBVr8EFv4ER0yBY9wCJiIi4S3nbnwVq+yMiUmd48h7bqpq99z9hnweBT40
xdwPhQHnD1WbAdyccW1Wj+Aanya6PSTUr4Mq/wnl3erscERGReqe87c+XZW1/jDHeLklERE7D2+1+RgKvWGuTgGuA140x1a7JGHO7MWaZMWZZenq6x4r0GYVHuT79ObYFtXda+oiIiIhHXNw+UW1/RETqEE8G29M2ewduA2YCWGsXAyFAfDWPxVo7xVrbx1rbJyEhwY2l+yb75RPE20y+aHWfswqyiIhIQ7BkCmxbVKunTG3v/Lti4YYG8MW5iEg94Mlge9pm78BO4FIAY0xHnGCbXrbfCGNMsDGmJdAW+N6Dtfq+9I3w3X95qyQV/+Z9vF2NiIhI7SjOh2Uvwoxb4OC6Wjtt0wptf0RExPd5LNhaa0uA8mbv63BWP15jjHm4rME7wL3ARGNMGjAdGGcda3BmctcCc4E7rbUuT9Xq86yFuf+HKyCEx0uG0yIuzNsViYiI1I7AUBj9NgSGwJs3wdH9tXbq1PaJfL8tU21/RETqAI/eY2utnWOtbWetbW2t/UvZe3+y1s4ue77WWjvAWtvdWtvDWvtphWP/UnZce2vtJ56s0+et/xi2fMG69neRQSNaxCrYiohIAxLdAkbNhLxMJ9wW1s59r6ntEih2Wb7ZfKhWziciImfP24tHyekU58Pc30NiJxY2crolJcUo2IqISAPTtAfc9AocWA1vjwOX52dRy9v+LNyo+2xFRHydgq2v+/ppOLITrnmCHVlFNI4KJiRQC0eJiEgD1O4KuPYp2PwZzLnXuVXHg4IC/BhQoe2PiIj4LgVbX5a5Db7+J3S5EVIuYGdmni5DFhGRhq3PeLjgN7D8Fecz0sNS1fZHRKROULD1ZfP+CH4BcMUjAOzKzKO5gq2IiDR0lzwAXW+C+Q/Bqrc9eiq1/RERqRsUbH3Vps9hw8cw8D6IakpBsYv92QUkx4Z7uzIRERHv8vODoc9C8gXwwS9h+9ceO5Xa/oiI1A0Ktr6opBA++R3EtYFzfwnAnsP5WAst4kK9XJyIiIgPCAiGEW9ATEuYMQoOrvfYqcrb/uSo7Y+IiM9SsPVF3/0XMrfA1X93PriBnZl5ALrHVkREpFxojNPj1j/Yoz1uy9v+fKu2PyIiPkvB1tcc2QNfPgEdBkGby469vass2OoeWxERkQpikmH0TMg7BNNu9kiPW7X9ERHxfQq2vuazB8C64Mq/VHp7Z0YeIYF+JEQEe6kwERERH9W0p9Pjdv+PMOtnbu9xq7Y/IiK+T8HWl2z7Cla/AwPugZiUSpt2lLX6McZ4pzYRERFf1u5KuPYfsGkefHKf23vcqu2PiIhvU7D1Fa5iZ8Go6BZwwT0/2bxLPWxFREROrc/PnC+Hl70E3zzt1qHV9kdExLcp2PqKpVPh4Fq48m8QWHnlY2stO9XDVkRE5PQu/TN0GQafPwg/znLbsOVtfxZsUNsfERFfpGDrC3IOwoK/QutLocO1P9mckVtEXpFLM7YiIiKn4+cH1z0HyQPg/Tvc2uP24vaJLN2utj8iIr5IwdYXfP4QFOc77X2quIe2vNVPcpyCrYiIyGkFBMPwN5z1KmaMgvQNbhl2YHu1/RER8VUKtt62aymsfAPOuxPi21a9i3rYioiInJmw2Ao9bm+EowdqPGSfZLX9ERHxVQq23lTqgjm/hchz4KL7Trrbzgwn2CbFKNiKiIhUW0wKjHoLcst63Bbl1mg4tf0REfFdCrbetOI12LcSrngUgiNOutvOzDwaRwUTEuhfi8WJiIjUA816wY0vw/5Vbulxq7Y/IiK+ScHWW/IyYf7DzuIWXYadctedavUjIiJy9tpfBVc/DhvnOq31ajDbWt72R6sji4j4FgVbb1nwFyg44nzQVrFgVEVq9SMiIlJD/SbCgF/Bshfhm3+d9TBNo0Np3zhS/WxFRHyMgq037Etzmsf3mwhNupxy14JiF/uzCzRjKyIiUlOXPgidb4DP/1yjHrep7RPU9kdExMco2NY2a2HOfRAaC6m/P+3uew7nY61WRBYREamx8h63Lc53etzu+PashlHbHxER36NgW9tWvQW7lsBlD0Jo9Gl336lWPyIiIu4TGAIj3oToZJg+EtI3nvEQavsjIuJ7FGxrU0E2fPoANOsNPUZX65BjPWzjFGxFRETc4liP20B4cxjknNlCUGr7IyLiexRsa9OXf4fcdLjmCedyqGrYmZFHSKAfCRHBHi5ORESkAYlt6fS4zUk/qx635W1/Nqntj4iIT1CwrS0H18OS56HXGGfGtprKW/2Y06ycLCIiImeoWW+48SVnUccz7HFb3vZnodr+iIj4BAXb2mCt0zcvKAIu/fMZHaoetiIiIh7U4ZrjPW7n/l+1e9yq7Y+IiG9RsK0Naz+AbV/CJfdDeFy1D7PWsks9bEVERDyr30Q4fxIsnQrf/qfah6ntj4iI71Cw9bSiXJj3R2jcFXqPP6NDM3OLyC1yacZWRETE0y57CDpfD589AKvfrdYhavsjIuI7FGw97aunIHu3s2CUf8AZHbpDrX5ERERqh58fXPc8ND8X3vt5tXrcqu2PiIjvULD1pIwt8O2/odtwSD7vjA/fpWArIiJSewJDYOR0iG7h9Lg9tOmUu6vtj4iI71Cw9aR5fwD/ILj84bM6fGeGE2x1j62IiEgtCYuF0bPALwDeOH2P24s7qO2PiIgvULD1lA1znRUWUydDZJOzGmJnZh6No4IJCfR3c3EiIiJyUrEtYdRMJ9ROG37KHrdq+yMi4hsUbD2huADmTob4dtDv52c9jFr9iIiIeElSb7jxRdj7A7wzAUpdVe52TiO1/RER8QUKtp6w+D+QtQ2u/jsEBJ31MGr1IyIi4kUdrnV63G6YA5+cvMet2v6IiHifgq27Hd4Fi/4BHYdA60vOepjCEhf7sgs0YysiIuJN/W+H8+6CpS/A4meq3EVtf0REvE/B1t0+/aPz88q/1GiYPVn5WKsVkUVERLzu8keg01D49H5Y895PNvdJjiUiOIBXvt1OUUmpFwoUEREFW3fauhDWfgAX3uu0CqiBnWr1IyIi4hv8/OD6KU6P23d/Dju/q7Q5KMCPBwZ15NstGUya/gMlLoVbEZHapmDrLq5imPM7iEmB8++u8XAKtiIiIj6kvMdtoySYPgIOba60eXjfFvxpUCfmrtnPb99Ow1WqvrYiIrVJwdZdlvwPDm2Aqx5zPvxqaGdGHsEBfiREBruhOBEREamxsFi4ZRYYf3hzGORUXgn5Zxe05L4r2/P+yr388b0fsSdZbEpERNxPwdYdju6HhY9B2yug3VVuGbK81Y8xxi3jiYiIiBvEtoJRb8HRAzB9OBTlVdp858VtuPuSNsxYuouHPlyrcCsiUksUbN3hsz+Dq9CZrXVTEN2ZmUdynC5DFhER8TlJfWDYVNizosoet7+5vB23XdCSV77dzuPzNijciojUAgXbmtqxGFbNcO6rjWvtliGttephKyIi4ss6DnL61W/4GOZOrtTj1hjD/dd2ZHT/Fjy3cAvPfLH5FAOJiIg7BHi7gDqt1AWf3AdRzZyVkN0kM7eI3CKXFo4SERHxZf1/Dod3Ov1to5
Ph/LuObTLG8MjQLuQXu/jHZxsJDfJnwoWtvFisiEj9pmBbE8tfhv0/wo0vQ1C424bVisgiIiJ1xOWPOOH20/shIhG63Xxsk5+f4fFh3SgsLuXRj9cRHOjPrecme7FYEZH6S5cin63cDJj/CKRcCJ2vd+vQCrYiIiJ1hJ8f3DAFks+HdyfCl09Uuiw5wN+Pfw7vwaUdEnng/dXMWr7bi8WKiNRfCrZn64uHofAoXPOE2xaMKrerLNgmxSjYioiI+LzAULjlXeg2HBY8Cu/eDsUFxzYHBfjx7OheXNAmnt/NSuOjVXu9WKyISP2kYHs29qyA5a9C/19AYke3D78jI4/EyGBCg/zdPraIiIh4QGAIXP8/uOR++HEmvDakUp/bkEB/pozpTZ/kWO6ZsZLP1x7wYrEiIvWPgu2ZKi2FOfdBeAKk/p9HTlHew1ZERETqEGPgovvgpldgXxpMvQQOrju2OSwogBfH9aFzs0b88s0VfLUp/eRjiYjIGVGwPVNp02DPMrj8IQhp5JFT7FKwFRGRWmaMucoYs8EYs9kYM/kU+w0zxlhjTJ/arK9O6Xw9jJ8DJYUw9XLY9PmxTZEhgbw6vi+tEyOY+NoylmzN8GKhIiL1h4Ltmcg/DJ8/CEn9oNsIj5yisMTFvuwCWsQp2IqISO0wxvgDzwJXA52AkcaYTlXsFwn8ClhSuxXWQc16w8QvIDYFpt0ES/53bFN0WBCv39aPZtGh/OyVpfywM8t7dYqI1BMKtmdi4WOQe8hZMMrPM390e7LysVYrIouISK3qB2y21m611hYBM4ChVez3CPB3oKCKbXKiRkkwfi60uwo++R18fC+4SgCIjwhm2sRziY8MZuxL37Nm7xEvFysiUrcp2FbXgTXw/RToMx6a9vDYadTqR0REvKAZsKvC691l7x1jjOkFNLfWfnyqgYwxtxtjlhljlqWn6x5SgiNg+Btw/iRYOtWZvc0/DEDjqBDenNCfiOAAbn3xezYdOOrlYkVE6i4F2+qwFub8DkKi4JIHPHqqXQq2IiLiY4wxfsBTwL2n29daO8Va28da2ychIcHzxdUFfv5wxSMw5BnYtghevBwytwJOa783J56Lv59h9NQlbD+U6+ViRUTqJgXb6lj9Duz4Gi79E4TFevRUOzPzCA7wIyEy2KPnERERqWAP0LzC66Sy98pFAl2AhcaY7cC5wGwtIHWGet0Kt74PuenwwqWw41sAWsaH8+aE/hS7Shk9dQm7s/K8XKiISN2jYHs6hTnw6f1wTnfoNdbjpytv9WOM8fi5REREyiwF2hpjWhpjgoARwOzyjdbaI9baeGttirU2BfgOGGKtXeadcuuwlhfChPnOF+WvDoGV0wBo1ziS12/rT3ZBMaOnLuFAtm5jFhE5Ewq2p/PVk3B0H1z9hHMpkYftyFCrHxERqV3W2hLgLmAesA6Yaa1dY4x52BgzxLvV1UNxrWHC55B8Hrx/h9NxobSULs0a8erP+nHoaCGjpy4hI6fQ25WKiNQZHg22p+uJZ4z5pzFmZdljozHmcIVtrgrbZp94bK04tBm+fQa6j4IW/T1+OmstuzLzaK5gKyIitcxaO8da285a29pa+5ey9/5krf3JZ7C1NlWztTUUGgO3vAu9x8HX/4S3x0BRLr1axPDiuL7syszj1he/50hesbcrFRGpEzwWbKvTE89a+2trbQ9rbQ/gP8C7FTbnl2+z1tb+t8XWOkvzB4bCZQ/Wyikzc4vILXJpxlZERKQh8A+EQU/DlX+DdR/By1dD9l7ObRXHlDF92Hwwh7Evf09OYYm3KxUR8XmenLGtbk+8ciOB6R6s58xsmANb5kPq7yGyca2csrzVT3Kcgq2IiEiDYAyc90sYOQMytsALl8DelQxsl8Azo3ry454j/OyVpeQXubxdqYiIT/NksD1tT7xyxphkoCXwRYW3Q8r64H1njLnOc2VWoTgf5k6GhA7Qb2KtnVY9bEVERBqo9lfBz+aBX4Azc7t2Nld0bsLTw3uwbHsmt7++jMIShVsRkZPxlcWjRgCzrLUV/8ZOttb2AUYBTxtjWp94kMeawH/zLzi8E65+3LlMqJaU97BNilGwFRERaXCadIGJX0BiJ5h5K3z1FIO7ncPfh3Xjq02HuPPNHyh2lXq7ShERn+TJYHu6nngVjeCEy5CttXvKfm4FFgI9TzzII03gs3Y4izh0vh5aDXTPmNW0MzOPxMhgQoM8v/qyiIiI+KCIRBj3EXQZBvMfgvd/yU09EnlkaGc+X3eAX7+1Elep9XaVIiI+J8CDYx/riYcTaEfgzL5WYozpAMQAiyu8FwPkWWsLjTHxwADgcQ/Wety8P4DxgyserZXTVVTew1ZEREQasMBQGPYixLeDhX+DrO3cOvwN8os78Nc56wkJ9OfxYd3w81PPexGRch6bsT2DnngjgBnW2opfP3YElhlj0oAFwGPW2rWeqvWYkiIIDIOLfguNkjx+uhPtysxXsBURERFnUanUyU7A3bMcpl7C7R1L+PVl7Zi1fDd/mr2ayv90EhFp2Dw5Y4u1dg4w54T3/nTC6werOO5boKsna6tSQBAMe8Fp9VPLikpK2XskXz1sRURE5LiuN0J0MswYCVMvZ9JNL5M3sBX/+3IroYH+/OGajhijmVsREV9ZPMq3eOEDYndWHtZqRWQRERE5QfO+zqJSjZIwb97E5LhvGHteMi98tY1/fr7J29WJiPgEBVsfcazVj3rYioiIyImiW8Bt86DNZZg59/Jg4KuM6N2Ef8/fxHMLt3i7OhERr1Ow9RHlrX6SNWMrIiIiVQmOhJHT4by7MN9P4W8Ff+XmLlH8fe56Xvlmm7erExHxKo/eYyvVtzMzj+AAPxIig71dioiIiPgqP3+48i8Q1wYz57f8PW43/m3v58EP1xIa5M/wvi28XaGIiFdoxtZHlLf60QIQIiIiclp9xsMt72CO7uOvGfdwW/JBJr/7Ix+s3OPtykREvELB1kfsVKsfEREROROtUmHCfExIFPcf+h33Nk7jNzPTmLt6v7crExGpdQq2PsBay67MPLX6ERERkTMT39YJt0n9uOvw33ks+gMmTV/Ggg0HvV2ZiEitUrD1AVl5xeQUlmjGVkRERM5cWCzc+h70vIWb8mbwYvhz3PP6t3y7+ZC3KxMRqTUKtj7gWKsfBVsRERE5GwFBMOQZuPwRLij6hreCH2Xya5+xfEemtysTEakVCrY+YEdGLqAetiIiIlIDxsCASZgR02jnt4dZ/n/kby+9zY+7j3i7MhERj1Ow9QHlPWybxyjYioiISA11uAa/n80jLjyY18yfePHFZ1i/P9vbVYmIeJSCrQ/YmZlHYmQwoUH+3i5FRERE6oNzuuH/8wX4J3bgKfsEc6f8kS0Hj3q7KhERj1Gw9QHlPWxFRERE3CayCcETPiG39bXcU/oaa54fy8a9Gd6uSkTEI6oVbI0x7xpjrjXGKAh7wC71sBURERFPCAojcvTrpPf6FUNK59Pof72Y98StfD3/AwqLirxdnYiI21Q3qP4XGAVsMsY8Zoxp78GaGpSiklL2HslXD
1sRERHxDD8/EoY8zOHrp5GT0JPU3Llc8NUYsv/alqXPjmfPD/Og1OXtKkVEaiSgOjtZaz8HPjfGNAJGlj3fBbwAvGGtLfZgjfXansP5WKtWPyIiIuJZ0d2vJbr7tZQWHGXdV++Qn/YOXQ5+ROgH73L4wxgyml9B0oBRBLe5EPy07oeI1C3VCrYAxpg44BbgVuAH4E3gAmAskOqJ4hqCYz1s1epHREREaoFfSCQdLx8Hl4/jUGYGX8+fSeD62fTf/h7BO94iJyCGorbXEtv3ZkgeAP7V/ueiiIjXVOtvKmPMe0B74HVgsLV2X9mmt4wxyzxVXENwLNhqxlZERERqWXxsHJffdAfW/oLvN+xmzaJZNNk9l9S1b8O6NygIisW/8xAC/7+9e4+Pqrz3Pf79zUzuCSE3QEi4WFFAAREU6xVrrdhaUSvFVvsS6mXbar30tN3Y2qvusz2n3d2t3R5PaYvdWJWtuLWcva1urVi0ogJqVS4KWyDhIoYMCQmTQCbznD9mEiYhIQnMypokn/frNa+ZdZk1vywCi+88z3qeUy6Xxp5LyAWQtnr6r9P9zrkVnW1wzs1IYT2DTmXNfmWFAirLz/K7FAAAMEiZmWZOqNDMCXdq7/5b9MSazdr2+h81rf4lXfjmY8p46/eKZhcrOOnzspMvl8aeR8gFkFZ6+i/SJDN7yzlXK0lmViTpS865/+NdaYNDZTiiiuJcBQLmdykAfBvt3QAAIABJREFUAAAqysvUdedPkjtvot6s/Dv95LXN2v/en/Tp/a/pojcfV+6b/6pYTrECEy+VJl0ujTtPCmb4XTaAQa6nwfZG59wDrQvOub1mdqPioyXjGFQy1Q8AAEhDZqbpY4o1fcwZ2jdnmv749k5d8/omle1+RZ+PvaGL3n5C2W8ukcspkk1IhNzjzyfkAvBFT4Nt0MzMOeckycyCkjK9K2twcM6pKhzRzHHFfpcCAADQpSHZGfrKmWP0lTPH6N3tp+ux1ZX6wVtbNCP6luZlrNV57/y7Mt96WMoeKk24VDr5cmnc+VKI/y4C6Bs9DbbPKj5Q1K8Ty3+XWIdjsDfSrIYDUVpsAQBAvzG5vFCTyyfre5+dqP9451T9yxuf1i1VH2tWaJ2uz/ybTlv3tEJv/0HKLoyH3EmXS8fPIuQC8FRPg+3fKx5mv5ZYfl7Sbz2paBBhRGQAANBf5WWFNO/00Zp3+mht2LVPS98YrxveOl1NTdfoC4UfaH7h3zR+w/9T4O1H4iH3pM9Jk+ZIn7hACjFoJoDU6lGwdc7FJD2YeCBFmMMWAAAMBBOPG6IfzzlFd312op55d5eWvjFcF289WbmBL+qWMdv1hZy1Gv7+f8r+9qiUVSiddEm8u/InPkXIBZASPZ3Hdrykf5Q0SVJ263rn3PEe1TUoVCWCbUURwRYAAPR/2RlBXXlaua48rVybP27Qv62u1O/ezNVP94/VmMJ5+ubJu3SRVin3g2eld5ZKWUPiIXdSIuRmZHf/IQDQiZ52RX5I0g8l/bOkCyQtkBTwqqjBorImorKCLOVkBv0uBQAwgJjZ7Ypfu+sVv3VomqSFzrn/8rUwDConDMvX9z43Sd+6+CQ9v363HnujUrevjSpgl+mik67TzRU7NLX+JQXe/0/pnX+TMgukk2ZLFTOlwgppaIVUWB7vxgwA3ehpsM1xzv05MTLyNkk/MrO1kn7gYW0D3rbwfu6vBQB44avOuV+a2cWSiiR9RdLDkgi26HNZoaAunTJSl04ZqW01+/Vvq6v0xNrtumJjroYPuUJXn/Z1XTN8m4ZV/Una8B/Su090OEBhPOAWlh8Ku4UViUe5VDBCCtBIAAx2PQ22B8wsIGmTmd0qaYekfO/KGhyqwo06g6l+AACpZ4nnz0p62Dm3zszsSG8A+sKYkjx9Z/YE3XnRifrzho+1dHWl7v/LVt0v6ZwTrtWXLlmoCyukrP27pNpKqW67VFd16Lnqdamptv1BAyFpyMj2YbctBCeWM/N8+XkB9J2eBtvbJeVKuk3SPYp3R77Oq6IGg4PRmHbWNaqCFlsAQOqtNbP/kjRO0l1mViAp5nNNQJuMYECzTxmh2aeM0I7aRj2+ukqPr6nS1x99WzkZQc08vljnjp+mc8dfpPHD8tXue5kD9Ymgmwi7tVWHlrf9Vdq3U3It7T8wp/hQS+/QpPBbODr+nFcmBbjLDujPug22ZhaUNM859y1JDYrfX4tjtKO2Uc5JYwi2AIDUu17SqZI+dM5FzKxYXL+RpkYNzdGdF52o2y4cr5c3VWvFxo/18qY9uuf99ZKk4UOydM4JZTp3fKnOPqFUZQUF0rCJ8UdnWqJS/a6k8Jto+a2tksIfSlv+Ih1saP+eYJZUOKp92E1u9R0yioGtgDTXbbB1zrWY2Tl9UcxgwlQ/AAAPfVLS2865/WZ2raTTJP3S55qAIwoGTLNOGqZZJw2TJG3fG9Erm/bo5c179OeNu/Xkm9slxacWOnd8qc4dX6rTxxYrO6PD/bXBUDyQDq3o/IOci3dnbg27bd2dE6//+89S/UeSXPv35Q07FHbzR0h5pfFHbmm8xTevTMorkbKHSvT8B/pcT7siv2VmyyU9IWl/60rn3L97UtUg0BZsabEFAKTeg5KmmtlUSf9D8ZGRl0g639eqgF4oL8rV1WeM1tVnjFZLzGndzjq9vGmPXt5UrYf+ukWLVn6orFBAZ4wr1jknlOrc8WWaMKJAgUA3odJMyimKP0ZM7nyf6IF4l+a6pG7Orff87l4n/fdL0oG6zt8byJBySw4F3byyRPhtDcCJ59Z9sgoIwkAK9DTYZkuqkfSppHVOEsH2KFWFI8oKBVSWz6TkAICUizrnnJnNkfQvzrnfmdn1fhcFHK1gwDSlfKimlA/VLRecoMjBqF7/MKyXN+3RK5ur9Y9/2qh//NNGleZn6uxEyD13fKmGDznK7sOhLKl4XPzRlegBaf8eKbJH2l8t7a+JP3dc3rs1vl/H7s9tP1xm+6CbHIBzk8NwYjkzjyAMdKJHwdY5x305KVZZE1FFcW733yoCANB79WZ2l+LT/JybmNkgw+eagJTJzQzpggnDdMGEeLflj+qa9MrmeGvuXzfv0R/f3ilJGj8svy3kzjy+WLmZPW3T6YFQ6325o3q2f3NjUhBuDb+J50jNoeU9m+Kvo41dfG7OoaDbrjW4w3JOkZQzND5dEgNjYRDo0d9uM3tIh91oIDnnvpryigaJynCEbsgAAK/Mk/Rlxeez/cjMRkv6qc81AZ4ZUZitq6aX66rp5YrFnDZ+VK+XN1Xrlc179IfXt2nxX7coI2iaPqaoLeiePLJQwb5sYMjIOfK9vx0d3J8Ivsmtwh2CcMNuaff6+OuWA10cyKTsIVJ2Yfz+35yhSc+dreuwPch3Yugfevq11X8kvc6WdIWknakvZ3BwzqkqHGEOWwCAJxJh9hFJp5vZpZLecM4t8bsuoC8EAqZJI4do0sgh+rvzP6Gm5hat3hrWK5v2aOWmPfrpc+/rp8+9r6G5GfFuyyeU6pzxpSovSrMGh8y8+KNoTPf7Ohfv6pzcBbpx
b3yQrMZaqaku6XWtVP3+oXXRpm7qyO9FKO6wjpGk0Yd62hX5yeRlM3tM0iueVDQI7I00q/5AlDlsAQCeMLMvKt5C+5Ikk/QrM/u2c26Zr4UBPsjOCCZaact0l6Tq+gP66+Y9bffn/uc7uyRJx5fm6Zzx8ftzzzy+WAXZ/ail0iw+CFVWgVR8fO/e29zUdQDubN3erdKuxLqu7htuFco+cgBubQ12TpLr5llH3ta63OPjdbWv2h83EIx3Ow9mSaHMDs9ZR94WzOywT9K6YGZ63isdi8Vb/6MHpJaD8Ufr6+gBqaW5k+0H4+uSX0cT21pfjz1bmvh5T0s/2hsNxksalspCBhNGRAYAeOx7kk53zn0sSWZWJukFSQRbDHplBVm6fNooXT5tlJxz2vRxQ9toy0+s2a4lq7YpGDCdNnqozjmhTOeML9XU8kKFggP0PtWMbCljhFQwovfvbWlOBN+6RPDdeygAt61LCsUNH0nVGxPb96mTOx2TWCL4dXw+0rakZ7U+dbdvN58Vi7YPdrHm3p+nrgS7CsqZRwjMmfEvDJLXuVhSmOwqYLYG00Q4bRdWk7a7ltT9fNKhnyszNz2CrZnVq/1v3keS/t6TigaB1mA7hjlsAQDeCLSG2oQaSQP0f+XA0TMznTi8QCcOL9D154zTgWiL3txW23Z/7i/+/IH++YUPVJAd0lmfKNE548t03vhSjSnJ87v09BDMODRwVW/FYvFA1jFcpmMrZrJYLKkl8kCHgHigQ6BM3qfDusNaQhPbo03ttzXtO/JxW4P2YS3CGe3DbzAzHi6DRYl1yYE5s32YDmYc3rrck9cd1wVCffrn2dOuyAVeFzKYVCWCbUW63csBABgonjWz5yQ9llieJ+kZH+sB+oWsUFCf/ESJPvmJEn1H0t79B/XX/96jVzbFuy4/t263JGlIdkhjS/M0piRPY0tyNaYkT2NKcjWmJFdl+VmydA9n6SAQUL/8vi0QkALZ6XP/cCzWP74Q6AM9bbG9QtKLzrm6xPJQSbOcc097WdxAVVkTUVlBlnIyg36XAgAYgJxz3zazL0g6O7FqkXPuKT9rAvqjorxMXTplpC6dMlLOOW3Zs1+vbN6jD3bXa1tNRH+rqtV/vrNTsaR+jbmZwbbAO7okV2MToXdsSZ5GDMlmqkekFlM5tenpPbY/TL4gOudqzeyHkgi2R4GpfgAAXksM/PhktzsC6BEz0/Fl+Tq+LL/d+oPRmHbUNmprzX5t27Nf28IRbauJ6P3d9Xphw241txxKvZmhgEYX57a18sbDb/x51NCcgXsfL9AHehpsO/tblsIZrgeXSqb6AQB4oJMxMdo2SXLOuSF9XBIw4GWGAhpXmqdxpXnSSe23tcScdtU1altNJB58ayLaume/KsMRvbJ5j5qaY237hgKm8qKctqCb3M25ojhHWSF6+gFH0tNwusbMfi7pgcTyLZLWelPSwHYwGtOuukam+gEApBxjYgDpJRgwlRflqrwoV2ef0H6AJeecPq4/oK174oF3W3i/ttZEtK1mv97atlf1B6Jt+5pJIwtzEvfxtgbeQ/f25mbS3gT09G/BNyR9X9K/Kf5N8POKh1v00s7aRsUcU/0AAAAMZmam4UOyNXxItmYeX9Jum3NOeyPNiVbe/dq6Jx54t4Ujem7dRwrvP9hu/2EFWR1Cb55K87NUkB1KPDJUkB1SBl2dMYD1dFTk/ZIWelzLoLCNOWwBAABwBGam4rxMFedl6rTRRYdtr2tsVmWie3NlONLW6rvyg2otqz/Q5XGzMwLxkJvVPvDmZx163TEMtz0n9snOCDDqM9JST0dFfl7SXOdcbWK5SNJS59zFXhY3EFUSbAEAAHAMCnMyNLm8UJPLCw/bFjkYVWU4onDDQe1riqrhQFT1Tc2qb4o/NxyIal9TVPVNUTU0NWv3vqa2bfsPtnT72aGAxcNwdkgFWR3Cb1Iozk+E5yFJATk/sT0/M8To0Ei5nnZFLm0NtZLknNtrZsM8qmlAqwpHlBUKaFhBlt+lAAAAYIDJzQxpwoijGyeuJebaBeHk1/uaompo6jwk76htbFuub4qqJdbZGHaHmEn5maG2oHtY63C7QHx4gB6SCMlBwjGS9DTYxsxstHOuUpLMbKw6H3UR3aisiaiiOJdvqQAAacXMZkv6paSgpN865+7rsP1mxcfXaJHUIOkm59z6Pi8UgGeCAVNhToYKczKO+hjOOTU2tyTCb3IQjqrhwKGQnByQ65uiqmk4qK179rfte7Al1u1n5WUGO7QWHwrAQ7K77m6dHJr9nmLJOafmFqeWmFNzLKZoi1O0JabmWOK5xSmaWN/cElM0lnhOWp+fHdKwgiyV5mepMCdj0HYV72mw/Z6kV8zsL4pPGXCupJs8q2oAYw5bAEC6MbOg4jMfXCRpu6TVZra8Q3B91Dn3fxP7Xybp55Jm93mxANKamSk3M6TczJCGH8MEYweinYXj5rZu1B2DcX1TVLWRg6oKR9qC84Fo9+E4JyN4WDBODr4Bs7Zw2ZwInR3DZXL4jHYWSNuC6eEhtbvW7d7KDAZUmp+pskTQLSs49Ghbzs9SaUGW8jKDAyoE93TwqGfNbIbiYfYtSU9LavSysIHIOacq5rAFAKSfMyRtds59KElmtlTSHEltwdY5ty9p/zzRcwuAh7JCQWXlB1Waf/S37x2MxtoF30PBuPnw0JzUmryztrFte8w5ZQQDCgVNoUBAGUFTKGjKCHRcF1AoYMrJCCqUHVIoEN8WClr8/YH4PhkdjtPx/R33S37/4XXE1+9rataehoOqrj/Q9tjTcEC76pr0zo461TQcUGf5OScjmBR6MxOhN1ulBZkqy28fiLMz0n8e5Z4OHnWDpNsllUt6W9KZklZJ+pR3pQ08tZFm1R+IMoctACDdjJJUlbS8XdLMjjuZ2S2SvikpU138H8DMblKiV9fo0aNTXigA9FRmKKCS/CyVHEM4HghaYk57I4cH3+r6A6pOPG/Zs19vbAlrb6S502MUZIcOa/Vt95x4FOdl+jatVE+7It8u6XRJrznnLjCzCZL+p3dlDUyMiAwA6M+ccw9IesDMvizpbknXdbLPIkmLJGnGjBm06gKAz4IBU2l+PJROPO7I+x6MxlSz/4D21B9UdUNTUhA+FIzX79ynPfUHVH8g2ukxivMyE92dD7X8nnVCqS44yduxh3sabJucc01mJjPLcs5tNLOTPK1sACLYAgDS1A5JFUnL5Yl1XVkq6UFPKwIA9LnMUEDHFebouMIcSYdPJ5Ws8WBLvOW34UCXrcFrtu1Vdf0BBQOBtAm2281sqOL31j5vZnslbfOurIGpNdhWFOf4XAkAAO2sljTezMYpHmivlvTl5B3MbLxzblNi8XOSNgkAMGjlZAZVUZzb7W2WzqV+kKzO9HTwqCsSL39kZisUj+/Pdve+Hkwd8M+SLkgs5koa5pwbmth2neLdnCTpXufcv/ak1nRWWRNRaX6WcjN7+n0CAADec85FzexWSc8pfs1e7JxbZ2Y/kbTGObdc0q1m9mlJzZL2qpNuyAAAdGQWHyjLa71OWM6
5v/Rkv55MHeCcuzNp/29ImpZ4XSzph5JmKD7q4trEe/f2tt50UhmOaEwJ3ZABAOnHOfeMpGc6rPtB0uvb+7woAAB6yMshq9qmDnDOHVT8fpw5R9j/S5IeS7y+WNLzzrlwIsw+rwEwVx5z2AIAAABA6nkZbDubOmBUZzua2RhJ4yS92Nv39hcHozHtqmtkqh8AAAAASDF/Jhk63NWSljnnWnrzJjO7yczWmNma6upqj0pLjZ21jYo5RkQGAAAAgFTzMtj2ZuqAq3WoG3KP3+ucW+Scm+Gcm1FWVnaM5XqLqX4AAAAAwBteBtu2qQPMLFPx8Lq8405mNkFSkaRVSaufk/QZMysysyJJn0ms67cItgAAAADgDc/mnenh1AFSPPAudc65pPeGzewexcOxJP3EORf2qta+UBWOKDMU0LCCLL9LAQAAAIABxdMJVbubOiCx/KMu3rtY0mLPiutj22oiqijKUSDg/RxOAAAAADCYpMvgUQMeU/0AAAAAgDcItn3AOaeqcERjSvL8LgUAAAAABhyCbR+ojTSr/kCUOWwBAAAAwAME2z7AiMgAAAAA4B2CbR8g2AIAAACAdwi2faA12FYU5/hcCQAAAAAMPATbPlAVjqg0P0u5mZ7OrgQAAAAAgxLBtg/Ep/qhtRYAAAAAvECw7QPMYQsAAAAA3iHYeuxgNKadtY0EWwAAAADwCMHWYztrGxVz0uiSPL9LAQAAAIABiWDrMab6AQAAAABvEWw9RrAFAAAAAG8RbD1WFY4oMxTQsIIsv0sBAAAAgAGJYOuxynBEFUU5CgTM71IAAAAAYEAi2HqMqX4AAAAAwFsEWw8551RZQ7AFAAAAAC8RbD1U19is+gNRVRBsAQAAAMAzBFsPbathRGQAAAAA8BrB1kOtU/2MKcnzuRIAAAAAGLgIth5qDbYVxTk+VwIAAAAAAxfB1kNV4YhK87OUmxnyuxQAAAAAGLAIth6KT/VDay0AAAAAeIlg6yHmsAUAAAAA7xFsPdLcEtPO2kaCLQAAAAB4jGDrkZ21jYo5MYctAAAAAHiMYOuR1hGRabEFAAAAAG8RbD2yrSYRbEsItgAAAADgJYKtR6rCEWWGAhpekO13KQAAAAAwoBFsPVIZjqiiKEeBgPldCgAAAAAMaARbjzDVDwAAAAD0DYKtB5xzqqwh2AIAAABAXyDYeqCusVn1B6JM9QMAAAAAfYBg6wGm+gEAAACAvkOw9UBbsGWqHwAAAADwHMHWA63BtqKIYAsAAAAAXiPYeqCyJqLS/EzlZYX8LgUAAAAABjyCrQcqwxEGjgIAAACAPkKw9UBlOKIxBFsAAAAA6BME2xRrbolpZ20jIyIDAAAAQB8h2KbYztpGxZzoigwAAAAAfYRgm2LMYQsAAAAAfYtgm2LMYQsAAAAAfYtgm2KV4YgygwENL8j2uxQAAAAAGBQItilWFY6ovDhHgYD5XQoAAAAADAoE2xSrDEe4vxYAAAAA+hDBNoWcc9pWQ7AFAAAAgL5EsE2husZm1TdFCbYAAAAA0IcItinEVD8AAAAA0PcItinEVD8AAAAA0PcItinUGmwrigi2AID+xcxmm9n7ZrbZzBZ2sv2bZrbezN4xsz+b2Rg/6gQAoDME2xSqCkdUmp+pvKyQ36UAANBjZhaU9ICkSyRNkvQlM5vUYbe3JM1wzk2RtEzS/+7bKgEA6BrBNoUqwxFVcH8tAKD/OUPSZufch865g5KWSpqTvINzboVzLpJYfE1SeR/XCABAlwi2KcQctgCAfmqUpKqk5e2JdV25XtKfPK0IAIBeINimSHNLTDtrmwi2AIABzcyulTRD0k+72H6Tma0xszXV1dV9WxwAYNAi2KbIztpGtcQcXZEBAP3RDkkVScvliXXtmNmnJX1P0mXOuQOdHcg5t8g5N8M5N6OsrMyTYgEA6MjTYNvdCIuJfb6YGGVxnZk9mrS+xczeTjyWe1lnKrSOiDyGYAsA6H9WSxpvZuPMLFPS1ZLaXXvNbJqkXyseaj/2oUYAALrk2fC9SSMsXqT4vTqrzWy5c2590j7jJd0l6Wzn3F4zG5Z0iEbn3Kle1ZdqzGELAOivnHNRM7tV0nOSgpIWO+fWmdlPJK1xzi1XvOtxvqQnzEySKp1zl/lWNAAASbycl6ZthEVJMrPWERbXJ+1zo6QHnHN7Jak/fwNcGY4oMxjQ8IJsv0sBAKDXnHPPSHqmw7ofJL3+dJ8XBQBAD3nZFbknIyyeKOlEM/urmb1mZrOTtmUnBp94zcwu97DOlKgKR1RenKNAwPwuBQAAAAAGFS9bbHv6+eMlzVJ8oIqVZjbZOVcraYxzboeZHS/pRTN71zn338lvNrObJN0kSaNHj+7byjtgqh8AAAAA8IeXLbY9GWFxu6Tlzrlm59wWSR8oHnTlnNuReP5Q0kuSpnX8gHQaebGyhmALAAAAAH7wMth2O8KipKcVb62VmZUq3jX5QzMrMrOspPVnq/29uWmlLtKsfU1Rgi0AAAAA+MCzrsg9HGHxOUmfMbP1klokfds5V2NmZ0n6tZnFFA/f9yWPppxuWkdEZg5bAAAAAOh7nt5j24MRFp2kbyYeyfu8Kmmyl7Wl0rbwfkmixRYAAAAAfOBlV+RBo20OW4ItAAAAAPQ5gm0KVIUjKs3PVF6W34NMAwAAAMDgQ7BNgcpwhPtrAQAAAMAnBNsUYA5bAAAAAPAPwfYYNbfEtLO2iWALAAAAAD4h2B6jXbVNaok5uiIDAAAAgE8ItseIEZEBAAAAwF8E22NEsAUAAAAAfxFsj9G28H5lBgMaPiTb71IAAAAAYFAi2B6jqnBE5cU5CgbM71IAAAAAYFAi2B4jpvoBAAAAAH8RbI9RZQ3BFgAAAAD8RLA9BnWRZu1rihJsAQAAAMBHBNtj0DoiMnPYAgAAAIB/CLbHgKl+AAAAAMB/BNtjQIstAAAAAPiPYHsMKsMRleRlKj8r5HcpAAAAADBoEWyPQVU4QmstAAAAAPiMYHsMtoX3a0wJwRYAAAAA/ESwPUrNLTHtrG1i4CgAAAAA8BnB9ijtqm1SS8zRFRkAAAAAfEawPUpM9QMAAAAA6YFge5QItgAAAACQHgi2R6kyHFFmMKDhQ7L9LgUAAAAABjWC7VGqCkdUXpSjYMD8LgUAAAAABjWC7VGqZA5bAAAAAEgLBNujVBmOcH8tAAAAAKQBgu1RqIs0q66xWWNKCLYAAAAA4DeC7VFoHRGZrsgAAAAA4D+C7VFgqh8AAAAASB8E26NAiy0AAAAApA+C7VGoDEdUkpep/KyQ36UAAAAAwKBHsD0KVUz1AwAAAABpg2B7FJjqBwAAAADSB8G2l6ItMe2obSTYAgAAAECaINj20q66JrXEHMEWAAAAANIEwbaXttUkpvopIdgCAAAAQDog2PYSc9gCAAAAQHoh2PZSZTiizGBAw4dk+10KAAAAAEAE216rCkdUXpSjYMD8LgUAAAAAIIJtr1Uyhy0AAAAApBWCbS8xhy0AAAAApBeCbS/URZpV19hMsAUAAACANEKw7YWqvfERkemKDAAAAADpg2
DbC0z1AwAAAADph2DbC9tqEsG2hGALAAAAAOmCYNsLleGISvIylZ8V8rsUAAAAAEACwbYXqpjqBwAAAADSDsG2F5jqBwAAAADSD8G2h6ItMe2obSTYAgAAAECaIdj20K66JrXEHMEWAAAAANIMwbaHWqf64R5bAMBAZGazzex9M9tsZgs72X6emb1pZlEzu8qPGgEA6ArBtofa5rBlqh8AwABjZkFJD0i6RNIkSV8ys0kddquUNF/So31bHQAA3WPemh6qDEeUETSNGJLtdykAAKTaGZI2O+c+lCQzWyppjqT1rTs457YmtsX8KBAAgCOhxbaHKsMRlRflKhgwv0sBACDVRkmqSlrenlgHAEC/QLDtocoapvoBAKA7ZnaTma0xszXV1dV+lwMAGCQ8DbbdDUSR2OeLZrbezNaZ2aNJ668zs02Jx3Ve1tkTzGELABjAdkiqSFouT6zrNefcIufcDOfcjLKyspQUBwBAdzy7xzZpIIqLFO/StNrMljvn1iftM17SXZLOds7tNbNhifXFkn4oaYYkJ2lt4r17var3SOoizaprbCbYAgAGqtWSxpvZOMUD7dWSvuxvSQAA9JyXLbZtA1E45w5Kah2IItmNkh5oDazOuY8T6y+W9LxzLpzY9ryk2R7WekRVe5nqBwAwcDnnopJulfScpA2SHnfOrTOzn5jZZZJkZqeb2XZJcyX92szW+VcxAADteTkqcmcDUczssM+JkmRmf5UUlPQj59yzXbzXt0Es2qb6IdgCAAYo59wzkp7psO4HSa9XK95FGQCAtOP3dD8hSeMlzVL8YrnSzCb39M1mdpOkmyRp9OjRXtQn6VCwrSjO8ewzAAAAAABHx8uuyD0ZiGK7pOXOuWbn3BZJHygedHs0iEVfDVBRGY6oOC9TBdkZnn0GAAAAAODoeBls2waiMLNMxQeiWN5hn6cVb62VmZUq3jX5Q8Xv8fmMmRWZWZGkzyTW+aIqHOH+WgAAAABIU551RXYIviVPAAAVJklEQVTORc2sdSCKoKTFrQNRSFrjnFuuQwF2vaQWSd92ztVIkpndo3g4lqSfOOfCXtXancpwRFPKh/r18QAAAACAI/D0HtseDEThJH0z8ej43sWSFntZX09EW2LasbdRn58y0u9SAAAAAACd8LIr8oCwq65J0ZhjRGQAAAAASFME224cGhGZYAsAAAAA6Yhg2422OWxLCLYAAAAAkI4Itt2oDEeUETSNGJLtdykAAAAAgE4QbLtRGY6ovChXwYD5XQoAAAAAoBME224why0AAAAApDeCbTcqwxGNLs7xuwwAAAAAQBcItkdQ19is2kgzU/0AAAAAQBoj2B5BVeuIyMV5PlcCAAAAAOgKwfYI2qb6ocUWAAAAANIWwfYIWoNtBffYAgAAAEDaItgeQWU4ouK8TBVkZ/hdCgAAAACgCwTbI2CqHwAAAABIfwTbI4hP9UOwBQAAAIB0RrDtQrQlph17G5nDFgAAAADSHMG2C7vqmhSNOVpsAQAAACDNEWy7UNU2IjLBFgAAAADSWcjvAtLVtkSwHVOS53MlAFKlublZ27dvV1NTk9+lII1kZ2ervLxcGRmMgA8AQH9FsO1CZTiijKBpxJBsv0sBkCLbt29XQUGBxo4dKzPzuxykAeecampqtH37do0bN87vcgAAwFGiK3IXKsMRlRflKhjgP7/AQNHU1KSSkhJCLdqYmUpKSmjFBwCgnyPYdoE5bIGBiVCLjvidAACg/yPYdiE+hy1T/QAAAABAuiPYdqKusVm1kWam+gHQL0WjUb9LAAAA6FME2060TvVDsAWQapdffrmmT5+uk08+WYsWLZIkPfvsszrttNM0depUXXjhhZKkhoYGLViwQJMnT9aUKVP05JNPSpLy8/PbjrVs2TLNnz9fkjR//nzdfPPNmjlzpr7zne/ojTfe0Cc/+UlNmzZNZ511lt5//31JUktLi771rW/plFNO0ZQpU/SrX/1KL774oi6//PK24z7//PO64oor+uJ0AAAApASjIneCOWyBge/H/2+d1u/cl9JjTho5RD/8/MlH3Gfx4sUqLi5WY2OjTj/9dM2ZM0c33nijVq5cqXHjxikcDkuS7rnnHhUWFurdd9+VJO3du7fbz9++fbteffVVBYNB7du3Ty+//LJCoZBeeOEFffe739WTTz6pRYsWaevWrXr77bcVCoUUDodVVFSkr3/966qurlZZWZkeeughffWrXz32EwIAANBHCLadqKTFFoBH7r//fj311FOSpKqqKi1atEjnnXde21QzxcXFkqQXXnhBS5cubXtfUVFRt8eeO3eugsGgJKmurk7XXXedNm3aJDNTc3Nz23FvvvlmhUKhdp/3la98RX/4wx+0YMECrVq1SkuWLEnRTwwAAOA9gm0nKsMRFedlqiA7w+9SAHiku5ZVL7z00kt64YUXtGrVKuXm5mrWrFk69dRTtXHjxh4fI3kE345T1OTl5bW9/v73v68LLrhATz31lLZu3apZs2Yd8bgLFizQ5z//eWVnZ2vu3LltwRcAAKA/4B7bTlQy1Q8AD9TV1amoqEi5ubnauHGjXnvtNTU1NWnlypXasmWLJLV1Rb7ooov0wAMPtL23tSvy8OHDtWHDBsVisbaW364+a9SoUZKk3//+923rL7roIv36179uG2Cq9fNGjhypkSNH6t5779WCBQtS90MDAAD0AYJtJ+JT/RBsAaTW7NmzFY1GNXHiRC1cuFBnnnmmysrKtGjRIl155ZWaOnWq5s2bJ0m6++67tXfvXp1yyimaOnWqVqxYIUm67777dOmll+qss87Scccd1+Vnfec739Fdd92ladOmtRsl+YYbbtDo0aM1ZcoUTZ06VY8++mjbtmuuuUYVFRWaOHGiR2cAAADAG+ac87uGlJgxY4Zbs2bNMR8n2hLThO8/q787/3h9++IJKagMQLrYsGEDoe0Ibr31Vk2bNk3XX3+936X0uc5+N8xsrXNuhk8lDQipujYDACAd+drMTVQd7KprUjTmaLEFMKhMnz5deXl5+qd/+ie/SwEAAOg1gm0HTPUDYDBau3at3yUAAAAcNe6x7YCpfgAAAACgfyHYdlAZjigUMB1XmON3KQAAAACAHiDYdlAZjqi8KEfBgHW/MwAAAADAdwTbDqrCEY0uyfO7DAAAAABADzF4VAejinJ0wrACv8sAAAAAAPQQLbYd/J9rpuubF53odxkAoPz8fEnSzp07ddVVV3W6z6xZs9TdPKG/+MUvFIlE2pY/+9nPqra2NnWFAgAA+IxgCwBpbuTIkVq2bNlRv79jsH3mmWc0dOjQVJTWp6LRqN8lAACANEVXZACD058WSh+9m9pjjpgsXXJfl5sXLlyoiooK3XLLLZKkH/3oRwqFQlqxYoX27t2r5uZm3XvvvZozZ067923dulWXXnqp3nvvPTU2NmrBggX629/+pgkTJqixsbFtv6997WtavXq1GhsbddVVV+nHP/6x7r//fu3cuVMXXHCBSktLtWLFCo0dO1Zr1qxRaWmpfv7zn2vx4sWSpBtuuEF33HGHtm7dq
ksuuUTnnHOOXn31VY0aNUp//OMflZPT+Wjxv/nNb7Ro0SIdPHhQJ5xwgh5++GHl5uZq9+7duvnmm/Xhhx9Kkh588EGdddZZWrJkiX72s5/JzDRlyhQ9/PDDmj9/vi699NK2lun8/Hw1NDTopZde0ve//30VFRVp48aN+uCDD3T55ZerqqpKTU1Nuv3223XTTTdJkp599ll997vfVUtLi0pLS/X888/rpJNO0quvvqqysjLFYjGdeOKJWrVqlcrKyo7yDxkAAKQjgi0A9JF58+bpjjvuaAu2jz/+uJ577jnddtttGjJkiPbs2aMzzzxTl112mcw6H5n9wQcfVG5urjZs2KB33nlHp512Wtu2f/iHf1BxcbFaWlp04YUX6p133tFtt92mn//851qxYoVKS0vbHWvt2rV66KGH9Prrr8s5p5kzZ+r8889XUVGRNm3apMcee0y/+c1v9MUvflFPPvmkrr322k5ruvLKK3XjjTdKku6++2797ne/0ze+8Q3ddtttOv/88/XUU0+ppaVFDQ0NWrdune699169+uqrKi0tVTgc7va8vfnmm3rvvfc0btw4SdLixYtVXFysxsZGnX766frCF76gWCymG2+8UStXrtS4ceMUDocVCAR07bXX6pFHHtEdd9yhF154QVOnTiXUAgAwABFsAQxOR2hZ9cq0adP08ccfa+fOnaqurlZRUZFGjBihO++8UytXrlQgENCOHTu0e/dujRgxotNjrFy5UrfddpskacqUKZoyZUrbtscff1yLFi1SNBrVrl27tH79+nbbO3rllVd0xRVXKC8vPhL8lVdeqZdfflmXXXaZxo0bp1NPPVWSNH36dG3durXL47z33nu6++67VVtbq4aGBl188cWSpBdffFFLliyRJAWDQRUWFmrJkiWaO3duW8guLi7u9rydccYZbaFWku6//3499dRTkqSqqipt2rRJ1dXVOu+889r2az3uV7/6Vc2ZM0d33HGHFi9erAULFnT7eQAAoP8h2AJAH5o7d66WLVumjz76SPPmzdMjjzyi6upqrV27VhkZGRo7dqyampp6fdwtW7boZz/7mVavXq2ioiLNnz//qI7TKisrq+11MBhs1+W5o/nz5+vpp5/W1KlT9fvf/14vvfRSrz8vFAopFotJkmKxmA4ePNi2rTV4S9JLL72kF154QatWrVJubq5mzZp1xJ+zoqJCw4cP14svvqg33nhDjzzySK9rAwAA6Y/BowCgD82bN09Lly7VsmXLNHfuXNXV1WnYsGHKyMjQihUrtG3btiO+/7zzztOjjz4qKd5S+s4770iS9u3bp7y8PBUWFmr37t3605/+1PaegoIC1dfXH3asc889V08//bQikYj279+vp556Sueee26vf6b6+nodd9xxam5ubhccL7zwQj344IOSpJaWFtXV1elTn/qUnnjiCdXU1EhSW1fksWPHau3atZKk5cuXq7m5udPPqqurU1FRkXJzc7Vx40a99tprkqQzzzxTK1eu1JYtW9odV4rfO3zttddq7ty5CgaDvf75AABA+iPYAkAfOvnkk1VfX69Ro0bpuOOO0zXXXKM1a9Zo8uTJWrJkiSZMmHDE93/ta19TQ0ODJk6cqB/84AeaPn26JGnq1KmaNm2aJkyYoC9/+cs6++yz295z0003afbs2brgggvaHeu0007T/PnzdcYZZ2jmzJm64YYbNG3atF7/TPfcc49mzpyps88+u139v/zlL7VixQpNnjxZ06dP1/r163XyySfre9/7ns4//3xNnTpV3/zmNyVJN954o/7yl79o6tSpWrVqVbtW2mSzZ89WNBrVxIkTtXDhQp155pmSpLKyMi1atEhXXnmlpk6dqnnz5rW957LLLlNDQwPdkAEAGMDMOed3DSkxY8YM191cjgAGtw0bNmjixIl+l4E+tmbNGt155516+eWXu9yns98NM1vrnJvhdX0DGddmAEAqHenazD22AIAB67777tODDz7IvbUAAAxwdEUGAPTILbfcolNPPbXd46GHHvK7rCNauHChtm3bpnPOOcfvUgAAgIdosQUA9MgDDzzgdwkAAACdosUWwKAyUMYVQOrwOwEAQP9HsAUwaGRnZ6umpoYggzbOOdXU1Cg7O9vvUgAAwDGgKzKAQaO8vFzbt29XdXW136UgjWRnZ6u8vNzvMgAAwDEg2AIYNDIyMjRu3Di/ywDSkpnNlvRLSUFJv3XO3ddhe5akJZKmS6qRNM85t7Wv6wQAoDN0RQYAYJAzs6CkByRdImmSpC+Z2aQOu10vaa9z7gRJ/yzpf/VtlQAAdI1gCwAAzpC02Tn3oXPuoKSlkuZ02GeOpH9NvF4m6UIzsz6sEQCALhFsAQDAKElVScvbE+s63cc5F5VUJ6mkT6oDAKAbA+Ye27Vr1+4xs20pOlyppD0pOtZgxnlMDc5janAeU2MwnccxfhfQH5nZTZJuSiw2mNn7KTr0YPrd8xLnMTU4j6nBeUyNwXQeu7w2D5hg65wrS9WxzGyNc25Gqo43WHEeU4PzmBqcx9TgPA5YOyRVJC2XJ9Z1ts92MwtJKlR8EKl2nHOLJC1KdYH87qUG5zE1OI+pwXlMDc5jHF2RAQDAaknjzWycmWVKulrS8g77LJd0XeL1VZJedEwKDQBIEwOmxRYAABwd51zUzG6V9Jzi0/0sds6tM7OfSFrjnFsu6XeSHjazzZLCiodfAADSAsG2cynvQjVIcR5Tg/OYGpzH1OA8DlDOuWckPdNh3Q+SXjdJmtvXdSXhdy81OI+pwXlMDc5janAeJRm9iAAAAAAA/Rn32AIAAAAA+jWCbRIzm21m75vZZjNb6Hc9/ZGZVZjZCjNbb2brzOx2v2vqz8wsaGZvmdl/+F1Lf2VmQ81smZltNLMNZvZJv2vqj8zszsTf6ffM7DEzy/a7JgwOXJuPHdfm1OLafOy4NqcG1+b2CLYJZhaU9ICkSyRNkvQlM5vkb1X9UlTS/3DOTZJ0pqRbOI/H5HZJG/wuop/7paRnnXMTJE0V57PXzGyUpNskzXDOnaL44EIMHATPcW1OGa7NqcW1+dhxbT5GXJsPR7A95AxJm51zHzrnDkpaKmmOzzX1O865Xc65NxOv6xX/h2qUv1X1T2ZWLulzkn7rdy39lZkVSjpP8dFc5Zw76Jyr9beqfiskKScxf2mupJ0+14PBgWtzCnBtTh2uzceOa3NKcW1OQrA9ZJSkqqTl7eIf/WNiZmMlTZP0ur+V9Fu/kPQdSTG/C+nHxkmqlvRQotvYb80sz++i+hvn3A5JP5NUKWmXpDrn3H/5WxUGCa7NKca1+ZhxbT52XJtTgGvz4Qi28ISZ5Ut6UtIdzrl9ftfT35jZpZI+ds6t9buWfi4k6TRJDzrnpknaL4l79HrJzIoUbyUbJ2mkpDwzu9bfqgD0FtfmY8O1OWW4NqcA1+bDEWwP2SGpImm5PLEOvWRmGYpfOB9xzv273/X0U2dLuszMtire9e5TZvYHf0vql7ZL2u6ca22ZWKb4xRS982lJW5xz1c65Zkn/Luksn2vC4MC1OUW4NqcE
1+bU4NqcGlybOyDYHrJa0ngzG2dmmYrffL3c55r6HTMzxe+Z2OCc+7nf9fRXzrm7nHPlzrmxiv8uvuicG9Tfwh0N59xHkqrM7KTEqgslrfexpP6qUtKZZpab+Dt+oRjoA32Da3MKcG1ODa7NqcG1OWW4NncQ8ruAdOGci5rZrZKeU3xUscXOuXU+l9UfnS3pK5LeNbO3E+u+65x7xseaMLh9Q9Ijif8Ufyhpgc/19DvOudfNbJmkNxUfXfUtSYv8rQqDAdfmlOHajHTDtfkYcW0+nDnn/K4BAAAAAICjRldkAAAAAEC/RrAFAAAAAPRrBFsAAAAAQL9GsAUAAAAA9GsEWwAAAABAv0awBfoZM2sxs7eTHgtTeOyxZvZeqo4HAMBgwLUZ8B/z2AL9T6Nz7lS/iwAAAG24NgM+o8UWGCDMbKuZ/W8ze9fM3jCzExLrx5rZi2b2jpn92cxGJ9YPN7OnzOxvicdZiUMFzew3ZrbOzP7LzHIS+99mZusTx1nq048JAEC/wbUZ6DsEW6D/yenQ3Wle0rY659xkSf8i6ReJdb+S9K/OuSmSHpF0f2L9/ZL+4pybKuk0SesS68dLesA5d7KkWklfSKxfKGla4jg3e/XDAQDQD3FtBnxmzjm/awDQC2bW4JzL72T9Vkmfcs59aGYZkj5yzpWY2R5JxznnmhPrdznnSs2sWlK5c+5A0jHGSnreOTc+sfz3kjKcc/ea2bOSGiQ9Lelp51yDxz8qAAD9AtdmwH+02AIDi+vidW8cSHrdokP34n9O0gOKf4O82sy4Rx8AgO5xbQb6AMEWGFjmJT2vSrx+VdLVidfXSHo58frPkr4mSWYWNLPCrg5qZgFJFc65FZL+XlKhpMO+mQYAAIfh2gz0Ab7VAfqfHDN7O2n5Wedc67QCRWb2juLf7H4pse4bkh4ys29Lqpa0ILH+dkmLzOx6xb/9/ZqkXV18ZlDSHxIXWJN0v3OuNmU/EQAA/RvXZsBn3GMLDBCJ+3hmOOf2+F0LAADg2gz0JboiAwAAAAD6NVpsAQAAAAD9Gi22AAAAAIB+jWALAAAAAOjXCLYAAAAAgH6NYAsAAAAA6NcItgAAAACAfo1gCwAAAADo1/4/aSJyv4pLXcUAAAAASUVORK5CYII=\n", - "text/plain": [ - "
" - ] - }, - "metadata": { - "tags": [], - "needs_background": "light" - } - } - ] - }, - { - "cell_type": "code", - "metadata": { - "colab": { - "base_uri": "https://localhost:8080/" - }, - "id": "phcNBAzlE4FR", - "outputId": "3ad688d9-e08f-41bb-b295-47449334cd91" - }, - "source": [ - "# making predictions on real sentences\n", - "\n", - "sample_text = (\"The movie was a waste of time. But some things are fine\")\n", - "predictions = model.predict(np.array([sample_text]))\n", - "print(predictions)" - ], - "execution_count": 20, - "outputs": [ - { - "output_type": "stream", - "text": [ - "[[-1.9860197]]\n" - ], - "name": "stdout" - } - ] - } - ] -} \ No newline at end of file diff --git a/Final model/Team_4/Wikipedia_Simplifier.ipynb b/Final model/Team_4/Wikipedia_Simplifier.ipynb new file mode 100644 index 0000000..84dbb36 --- /dev/null +++ b/Final model/Team_4/Wikipedia_Simplifier.ipynb @@ -0,0 +1,3213 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "Wikipedia_Simplifier.ipynb", + "provenance": [], + "collapsed_sections": [], + "authorship_tag": "ABX9TyMXxpFSxyna2QIYJl7yNDUM", + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + }, + "accelerator": "GPU", + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "c4768b71c55445f092c33d0d9b197834": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_77418127c4654006a6610af8722b4f98", + "_model_module": "@jupyter-widgets/controls", + "children": [ + "IPY_MODEL_977ad2096048427ab9d3c7c567d311f3", + "IPY_MODEL_fcb0e031854446af94b2f29bd1d6710d", + "IPY_MODEL_ccf5379321ec4eeeb2ccf0a014737889" + ] + } + }, + "77418127c4654006a6610af8722b4f98": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "977ad2096048427ab9d3c7c567d311f3": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_85d2c4b4c4a14bcaabf06c2899acc41e", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + 
"_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": "Downloading: 100%", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_9cf7a6867acb4d3a93611932b9704d4e" + } + }, + "fcb0e031854446af94b2f29bd1d6710d": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_5af848fb9265434c93693ae6dd7e5ac1", + "_dom_classes": [], + "description": "", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 1802, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 1802, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_3a59e8f2a68a406fba8a15a8a54e62a3" + } + }, + "ccf5379321ec4eeeb2ccf0a014737889": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_70d73496167a433ca9e142e28fbb7596", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 1.80k/1.80k [00:00<00:00, 45.5kB/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_0ef209dc3b3d454fae7325d0ddff7240" + } + }, + "85d2c4b4c4a14bcaabf06c2899acc41e": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "9cf7a6867acb4d3a93611932b9704d4e": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "5af848fb9265434c93693ae6dd7e5ac1": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "", + "_view_module": 
"@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": "@jupyter-widgets/controls" + } + }, + "3a59e8f2a68a406fba8a15a8a54e62a3": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "70d73496167a433ca9e142e28fbb7596": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "0ef209dc3b3d454fae7325d0ddff7240": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "f29ee09134c24e63bb9c6a1ea65f3e6e": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_acf2fbcdc6dd4b1ba5de8908eac46199", + "_model_module": "@jupyter-widgets/controls", + "children": [ + 
"IPY_MODEL_1c3db3e73d924a6990cd10ebfc2665d2", + "IPY_MODEL_a4a56f6446074adfa1b11bbcb34e3d22", + "IPY_MODEL_f7dc8ad7682f43e4a8cc5f3c3442cb01" + ] + } + }, + "acf2fbcdc6dd4b1ba5de8908eac46199": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "1c3db3e73d924a6990cd10ebfc2665d2": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_4e8374fa45104f0eb48209c3e573cec5", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": "Downloading: 100%", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_64c4c18fcf9d41c4a0854d95bff4ebef" + } + }, + "a4a56f6446074adfa1b11bbcb34e3d22": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_ea74a17e91234cb1a803d9de41384ca4", + "_dom_classes": [], + "description": "", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 1222317369, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 1222317369, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_0c8133e3342b4f649cfba7d8e3795232" + } + }, + "f7dc8ad7682f43e4a8cc5f3c3442cb01": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_cf9d482e115c48cf861c22c88201b238", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 1.22G/1.22G [00:23<00:00, 52.4MB/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_11155ce2cd7141e394fd874e213efec7" + } + }, + "4e8374fa45104f0eb48209c3e573cec5": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": 
"DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "64c4c18fcf9d41c4a0854d95bff4ebef": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "ea74a17e91234cb1a803d9de41384ca4": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": "@jupyter-widgets/controls" + } + }, + "0c8133e3342b4f649cfba7d8e3795232": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "cf9d482e115c48cf861c22c88201b238": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + 
"11155ce2cd7141e394fd874e213efec7": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "7e961ac3509f4628bdfdbc7fc95d74de": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_61a80c0b95264b8aa78518d988419ae4", + "_model_module": "@jupyter-widgets/controls", + "children": [ + "IPY_MODEL_fff9f1c3802b477195c738a86e4afd48", + "IPY_MODEL_c2cab65440a742cf8aa268fcb839fd95", + "IPY_MODEL_7ea8106314e94965ab142c5fe945dfad" + ] + } + }, + "61a80c0b95264b8aa78518d988419ae4": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "fff9f1c3802b477195c738a86e4afd48": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_eb31c9e0621d4f029ff12b3b085b4adb", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": "Downloading: 100%", + "_view_count": null, + "_view_module_version": "1.5.0", + 
"description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_24ea13fff6084c41a97b191e38c4a936" + } + }, + "c2cab65440a742cf8aa268fcb839fd95": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_95e097a72dd9430fa4990d8125f3aaf7", + "_dom_classes": [], + "description": "", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 26, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 26, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_c1beffc7e319462194f55af137816064" + } + }, + "7ea8106314e94965ab142c5fe945dfad": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_783c049c65db4d958dd770c828869968", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 26.0/26.0 [00:00<00:00, 575B/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_ee16cce8e6444c138326ce2cbc7f4431" + } + }, + "eb31c9e0621d4f029ff12b3b085b4adb": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "24ea13fff6084c41a97b191e38c4a936": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "95e097a72dd9430fa4990d8125f3aaf7": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": 
"@jupyter-widgets/controls" + } + }, + "c1beffc7e319462194f55af137816064": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "783c049c65db4d958dd770c828869968": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "ee16cce8e6444c138326ce2cbc7f4431": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "a02e3162567847668f976f1e865506fd": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_65c2bacb18b24862955d1292704caa83", + "_model_module": "@jupyter-widgets/controls", + "children": [ + "IPY_MODEL_31a297d11a4743c5a07848d55194213e", + "IPY_MODEL_c395b5aaed7348119db6d55d03118dd2", + "IPY_MODEL_6b88509078be4a7fbd565dd772144ea1" + ] + } + }, + 
"65c2bacb18b24862955d1292704caa83": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "31a297d11a4743c5a07848d55194213e": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_53054592ddfe40d3b18546ce3ef29eb1", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": "Downloading: 100%", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_b5cad8be41d34a33b6455763477c5afc" + } + }, + "c395b5aaed7348119db6d55d03118dd2": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_f26727ff8e564530947c62a8d54aa2db", + "_dom_classes": [], + "description": "", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 898822, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 898822, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_4d0999f7d88b4405b97e3a5bba72fe80" + } + }, + "6b88509078be4a7fbd565dd772144ea1": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_d1556920ef6f474a975a6672ba311b49", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 899k/899k [00:00<00:00, 3.04kB/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_dbbed27ac93a4e79a06bad53e775484e" + } + }, + "53054592ddfe40d3b18546ce3ef29eb1": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", 
+ "_model_module": "@jupyter-widgets/controls" + } + }, + "b5cad8be41d34a33b6455763477c5afc": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "f26727ff8e564530947c62a8d54aa2db": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": "@jupyter-widgets/controls" + } + }, + "4d0999f7d88b4405b97e3a5bba72fe80": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "d1556920ef6f474a975a6672ba311b49": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "dbbed27ac93a4e79a06bad53e775484e": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": 
null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "d782e1075f2b47f78d43173231f78e8e": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_view_name": "HBoxView", + "_dom_classes": [], + "_model_name": "HBoxModel", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.5.0", + "box_style": "", + "layout": "IPY_MODEL_da051dda894240e7ada9b38a2ee83391", + "_model_module": "@jupyter-widgets/controls", + "children": [ + "IPY_MODEL_333a987a9a9c44d38d597a9605f9a228", + "IPY_MODEL_0bab7b0fb741464eba9c3c17449ce7d3", + "IPY_MODEL_213a2195a4f04c74973ef8961c9b45fe" + ] + } + }, + "da051dda894240e7ada9b38a2ee83391": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "333a987a9a9c44d38d597a9605f9a228": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_4d4a4b87e88d475b971a8a1c2e517a14", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": "Downloading: 100%", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_a519ea9882e54227848a48fe085c8a91" + } + }, + "0bab7b0fb741464eba9c3c17449ce7d3": { + "model_module": 
"@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_view_name": "ProgressView", + "style": "IPY_MODEL_98d396a455b54f24943ff39d82373290", + "_dom_classes": [], + "description": "", + "_model_name": "FloatProgressModel", + "bar_style": "success", + "max": 456318, + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": 456318, + "_view_count": null, + "_view_module_version": "1.5.0", + "orientation": "horizontal", + "min": 0, + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_48d0ce68c7674e28b14e7791e9a9dc75" + } + }, + "213a2195a4f04c74973ef8961c9b45fe": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_view_name": "HTMLView", + "style": "IPY_MODEL_01b7da4567b047ee9f4df9ec405aa46b", + "_dom_classes": [], + "description": "", + "_model_name": "HTMLModel", + "placeholder": "​", + "_view_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "value": " 456k/456k [00:00<00:00, 1.62MB/s]", + "_view_count": null, + "_view_module_version": "1.5.0", + "description_tooltip": null, + "_model_module": "@jupyter-widgets/controls", + "layout": "IPY_MODEL_21321dd78f5047bb913ca765a437433e" + } + }, + "4d4a4b87e88d475b971a8a1c2e517a14": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "a519ea9882e54227848a48fe085c8a91": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "98d396a455b54f24943ff39d82373290": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "ProgressStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "bar_color": null, + "_model_module": "@jupyter-widgets/controls" + } + }, + "48d0ce68c7674e28b14e7791e9a9dc75": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": 
null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + }, + "01b7da4567b047ee9f4df9ec405aa46b": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_view_name": "StyleView", + "_model_name": "DescriptionStyleModel", + "description_width": "", + "_view_module": "@jupyter-widgets/base", + "_model_module_version": "1.5.0", + "_view_count": null, + "_view_module_version": "1.2.0", + "_model_module": "@jupyter-widgets/controls" + } + }, + "21321dd78f5047bb913ca765a437433e": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_view_name": "LayoutView", + "grid_template_rows": null, + "right": null, + "justify_content": null, + "_view_module": "@jupyter-widgets/base", + "overflow": null, + "_model_module_version": "1.2.0", + "_view_count": null, + "flex_flow": null, + "width": null, + "min_width": null, + "border": null, + "align_items": null, + "bottom": null, + "_model_module": "@jupyter-widgets/base", + "top": null, + "grid_column": null, + "overflow_y": null, + "overflow_x": null, + "grid_auto_flow": null, + "grid_area": null, + "grid_template_columns": null, + "flex": null, + "_model_name": "LayoutModel", + "justify_items": null, + "grid_row": null, + "max_height": null, + "align_content": null, + "visibility": null, + "align_self": null, + "height": null, + "min_height": null, + "padding": null, + "grid_auto_rows": null, + "grid_gap": null, + "max_width": null, + "order": null, + "_view_module_version": "1.2.0", + "grid_template_areas": null, + "object_position": null, + "object_fit": null, + "grid_auto_columns": null, + "margin": null, + "display": null, + "left": null + } + } + } + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "BSW8-I5_88bV" + }, + "source": [ + "# Wikipedia Simplifier" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "1ZoFd6QdN1wt" + }, + "source": [ + "### Import pretrained text-simplification model: *MUSS*" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "FfFPGur8N3Jn", + "outputId": "3485fa8d-5cb9-4b6e-b392-bdadf6216aaa" + }, + "source": [ + "!git clone https://github.com/ris27hav/muss/" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'muss'...\n", + "remote: Enumerating objects: 325, done.\u001b[K\n", + "remote: Counting 
objects: 100% (325/325), done.\u001b[K\n", + "remote: Compressing objects: 100% (189/189), done.\u001b[K\n", + "remote: Total 325 (delta 171), reused 274 (delta 135), pack-reused 0\u001b[K\n", + "Receiving objects: 100% (325/325), 4.53 MiB | 10.02 MiB/s, done.\n", + "Resolving deltas: 100% (171/171), done.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "aslpFvlnpY53", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "57c910b8-dabb-45cd-8952-babae6267854" + }, + "source": [ + "!pip install -e muss/" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Obtaining file:///content/muss\n", + "Requirement already satisfied: numpy>=1.18.1 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (1.19.5)\n", + "Requirement already satisfied: pandas>=1.0.3 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (1.1.5)\n", + "Collecting nltk>=3.4.3\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/5e/37/9532ddd4b1bbb619333d5708aaad9bf1742f051a664c3c6fa6632a105fd8/nltk-3.6.2-py3-none-any.whl (1.5MB)\n", + "\u001b[K |████████████████████████████████| 1.5MB 8.3MB/s \n", + "\u001b[?25hCollecting tqdm>=4.45.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/7a/ec/f8ff3ccfc4e59ce619a66a0bf29dc3b49c2e8c07de29d572e191c006eaa2/tqdm-4.61.2-py2.py3-none-any.whl (76kB)\n", + "\u001b[K |████████████████████████████████| 81kB 11.9MB/s \n", + "\u001b[?25hRequirement already satisfied: sklearn>=0.0 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (0.0)\n", + "Requirement already satisfied: torch>=1.4.0 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (1.9.0+cu102)\n", + "Requirement already satisfied: scipy>=1.3.1 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (1.4.1)\n", + "Collecting gitpython\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/bc/91/b38c4fabb6e5092ab23492ded4f318ab7299b19263272b703478038c0fbc/GitPython-3.1.18-py3-none-any.whl (170kB)\n", + "\u001b[K |████████████████████████████████| 174kB 52.9MB/s \n", + "\u001b[?25hRequirement already satisfied: spacy>=2.1.3 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (2.2.4)\n", + "Requirement already satisfied: joblib>=0.13.2 in /usr/local/lib/python3.7/dist-packages (from muss==1.0) (1.0.1)\n", + "Collecting python-Levenshtein>=0.12.0\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/2a/dc/97f2b63ef0fa1fd78dcb7195aca577804f6b2b51e712516cc0e902a9a201/python-Levenshtein-0.12.2.tar.gz (50kB)\n", + "\u001b[K |████████████████████████████████| 51kB 8.7MB/s \n", + "\u001b[?25hCollecting fairseq==0.10.2\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/15/ab/92c6efb05ffdfe16fbdc9e463229d9af8c3b74dc943ed4b4857a87b223c2/fairseq-0.10.2-cp37-cp37m-manylinux1_x86_64.whl (1.7MB)\n", + "\u001b[K |████████████████████████████████| 1.7MB 36.5MB/s \n", + "\u001b[?25hCollecting truecase\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/6a/ec/ca9dc9ab492aebc57af351709355d74d90e2b71c2b75befd2a1bf2c5db78/truecase-0.0.14-py3-none-any.whl (28.4MB)\n", + "\u001b[K |████████████████████████████████| 28.4MB 105kB/s \n", + "\u001b[?25hCollecting sentencepiece>=0.1.83\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/ac/aa/1437691b0c7c83086ebb79ce2da16e00bef024f24fec2a5161c35476f499/sentencepiece-0.1.96-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.2MB)\n", + "\u001b[K 
[Verbose pip download and wheel-build output truncated; the dependency-conflict errors and the final "Successfully installed" summary follow below.]
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for antlr4-python3-runtime: filename=antlr4_python3_runtime-4.8-cp37-none-any.whl size=141231 sha256=1ec4e353f4af32770306bc63b0d0416174f8f7428b4f18a204ceadf41c85e03b\n", + " Stored in directory: /root/.cache/pip/wheels/e3/e2/fa/b78480b448b8579ddf393bebd3f47ee23aa84c89b6a78285c8\n", + "Successfully built python-Levenshtein easse kenlm varint bayesian-optimization yattag tseval simalign pathtools antlr4-python3-runtime\n", + "\u001b[31mERROR: albumentations 0.1.12 has requirement imgaug<0.2.7,>=0.2.5, but you'll have imgaug 0.2.9 which is incompatible.\u001b[0m\n", + "\u001b[31mERROR: sacrebleu 1.5.1 has requirement portalocker==2.0.0, but you'll have portalocker 2.3.0 which is incompatible.\u001b[0m\n", + "\u001b[31mERROR: easse 0.2.4 has requirement nltk==3.4.3, but you'll have nltk 3.6.2 which is incompatible.\u001b[0m\n", + "Installing collected packages: tqdm, nltk, smmap, gitdb, gitpython, python-Levenshtein, dataclasses, portalocker, sacrebleu, antlr4-python3-runtime, PyYAML, omegaconf, hydra-core, fairseq, truecase, sentencepiece, mmh3, varint, imohash, submitit, faiss-gpu, sacremoses, bayesian-optimization, cma, nevergrad, tokenizers, stanfordnlp, yattag, huggingface-hub, transformers, bert-score, tseval, networkx, simalign, easse, kenlm, watchdog, pathtools, cachier, muss\n", + " Found existing installation: tqdm 4.41.1\n", + " Uninstalling tqdm-4.41.1:\n", + " Successfully uninstalled tqdm-4.41.1\n", + " Found existing installation: nltk 3.2.5\n", + " Uninstalling nltk-3.2.5:\n", + " Successfully uninstalled nltk-3.2.5\n", + " Found existing installation: PyYAML 3.13\n", + " Uninstalling PyYAML-3.13:\n", + " Successfully uninstalled PyYAML-3.13\n", + " Found existing installation: networkx 2.5.1\n", + " Uninstalling networkx-2.5.1:\n", + " Successfully uninstalled networkx-2.5.1\n", + " Running setup.py develop for muss\n", + "Successfully installed PyYAML-5.4.1 antlr4-python3-runtime-4.8 bayesian-optimization-1.2.0 bert-score-0.3.9 cachier-1.5.0 cma-3.1.0 dataclasses-0.6 easse-0.2.4 fairseq-0.10.2 faiss-gpu-1.7.1.post2 gitdb-4.0.7 gitpython-3.1.18 huggingface-hub-0.0.12 hydra-core-1.1.0 imohash-1.0.4 kenlm-0.0.0 mmh3-3.0.0 muss networkx-2.4 nevergrad-0.4.3.post4 nltk-3.6.2 omegaconf-2.1.0 pathtools-0.1.2 portalocker-2.3.0 python-Levenshtein-0.12.2 sacrebleu-1.5.1 sacremoses-0.0.45 sentencepiece-0.1.96 simalign-0.2 smmap-4.0.0 stanfordnlp-0.2.0 submitit-1.3.3 tokenizers-0.10.3 tqdm-4.61.2 transformers-4.8.2 truecase-0.0.14 tseval-1.0 varint-1.0.2 watchdog-2.1.3 yattag-1.14.0\n" + ], + "name": "stdout" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "pydevd_plugins" + ] + } + } + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "H0zHV8cRqo36", + "outputId": "937d6cb2-72d7-4279-907f-75cc33a017e3" + }, + "source": [ + "%cd muss/" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/muss\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "rrgR24AYN87s", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "931beeff-e59a-49ff-ef9b-50ba10e26f65" + }, + "source": [ + "%run scripts/simplify.py scripts/examples.en --model-name muss_en_wikilarge_mined" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ 
+ "Downloading...\n", + "... 100% - 3835 MB - 27.72 MB/s - 138s\n", + "Extracting...\n", + "Downloading...\n", + "... 100% - 1 MB - 7.52 MB/s - 0s\n", + "Downloading...\n", + "... 100% - 0 MB - 4.31 MB/s - 0s\n", + "Downloading...\n", + "... 100% - 0 MB - 4.66 MB/s - 0s\n" + ], + "name": "stdout" + }, + { + "output_type": "stream", + "text": [ + "INFO:fairseq_cli.generate:Namespace(all_gather_list_size=16384, batch_size=None, batch_size_valid=None, beam=5, bf16=False, bpe=None, broadcast_buffers=False, bucket_cap_mb=25, checkpoint_shard_count=1, checkpoint_suffix='', constraints=None, cpu=False, criterion='cross_entropy', curriculum=0, data='/tmp/tmp1sc0qjne', data_buffer_size=10, dataset_impl='raw', ddp_backend='c10d', decoding_format=None, device_id=0, disable_validation=False, distributed_backend='nccl', distributed_init_method=None, distributed_no_spawn=False, distributed_port=-1, distributed_rank=0, distributed_world_size=1, distributed_wrapper='DDP', diverse_beam_groups=-1, diverse_beam_strength=0.5, diversity_rate=-1.0, empty_cache_freq=0, eval_bleu=False, eval_bleu_args=None, eval_bleu_detok='space', eval_bleu_detok_args=None, eval_bleu_print_samples=False, eval_bleu_remove_bpe=None, eval_tokenized_bleu=False, fast_stat_sync=False, find_unused_parameters=False, fix_batches_to_gpus=False, fixed_validation_seed=None, force_anneal=None, fp16=False, fp16_init_scale=128, fp16_no_flatten_grads=False, fp16_scale_tolerance=0.0, fp16_scale_window=None, gen_subset='tmp', iter_decode_eos_penalty=0.0, iter_decode_force_max_iter=False, iter_decode_max_iter=10, iter_decode_with_beam=1, iter_decode_with_external_reranker=False, left_pad_source='True', left_pad_target='False', lenpen=1.0, lm_path=None, lm_weight=0.0, load_alignments=False, localsgd_frequency=3, log_format=None, log_interval=100, lr_scheduler='fixed', lr_shrink=0.1, match_source_len=False, max_len_a=0, max_len_b=200, max_source_positions=1024, max_target_positions=1024, max_tokens=8000, max_tokens_valid=8000, memory_efficient_bf16=False, memory_efficient_fp16=False, min_len=1, min_loss_scale=0.0001, model_overrides=\"{'encoder_embed_path': None, 'decoder_embed_path': None}\", model_parallel_size=1, nbest=1, no_beamable_mm=False, no_early_stop=False, no_progress_bar=False, no_repeat_ngram_size=0, no_seed_provided=False, nprocs_per_node=1, num_batch_buckets=0, num_shards=1, num_workers=1, optimizer=None, path='/content/muss/resources/models/muss_en_wikilarge_mined/model.pt', pipeline_balance=None, pipeline_checkpoint='never', pipeline_chunks=0, pipeline_decoder_balance=None, pipeline_decoder_devices=None, pipeline_devices=None, pipeline_encoder_balance=None, pipeline_encoder_devices=None, pipeline_model_parallel=False, prefix_size=0, print_alignment=False, print_step=False, profile=False, quantization_config_path=None, quiet=False, remove_bpe=None, replace_unk=None, required_batch_size_multiple=8, required_seq_len_multiple=1, results_path=None, retain_dropout=False, retain_dropout_modules=None, retain_iter_history=False, sacrebleu=False, sampling=False, sampling_topk=-1, sampling_topp=-1.0, score_reference=False, scoring='bleu', seed=1, shard_id=0, skip_invalid_size_inputs_valid_test=True, slowmo_algorithm='LocalSGD', slowmo_momentum=None, source_lang=None, target_lang=None, task='translation', temperature=1.0, tensorboard_logdir=None, threshold_loss_scale=None, tokenizer=None, tpu=False, train_subset='train', truncate_source=False, unkpen=0, unnormalized=False, upsample_primary=1, user_dir=None, valid_subset='valid', 
validate_after_updates=0, validate_interval=1, validate_interval_updates=0, warmup_updates=0, zero_sharding='none')\n", + "INFO:fairseq.tasks.translation:[complex] dictionary: 50264 types\n", + "INFO:fairseq.tasks.translation:[simple] dictionary: 50264 types\n", + "INFO:fairseq.data.data_utils:loaded 4 examples from: /tmp/tmp1sc0qjne/tmp.complex-simple.complex\n", + "INFO:fairseq.data.data_utils:loaded 4 examples from: /tmp/tmp1sc0qjne/tmp.complex-simple.simple\n", + "INFO:fairseq.tasks.translation:/tmp/tmp1sc0qjne tmp complex-simple 4 examples\n", + "INFO:fairseq_cli.generate:loading model(s) from /content/muss/resources/models/muss_en_wikilarge_mined/model.pt\n", + "/usr/local/lib/python3.7/dist-packages/torch/_tensor.py:575: UserWarning: floor_divide is deprecated, and will be removed in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). This results in incorrect rounding for negative values.\n", + "To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). (Triggered internally at /pytorch/aten/src/ATen/native/BinaryOps.cpp:467.)\n", + " return torch.floor_divide(self, other)\n", + "INFO:fairseq_cli.generate:NOTE: hypothesis and token scores are output in base 2\n", + "INFO:fairseq_cli.generate:Translated 4 sentences (55 tokens) in 0.7s (5.60 sentences/s, 77.01 tokens/s)\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "Y4Tqmy0Kurhx" + }, + "source": [ + "---\n", + "### Get desired Wikipedia article" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "f79PqoSt94_C" + }, + "source": [ + "#####**Extract Wikipedia articles**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Dk3oPZKO-YtE", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "05d28dc1-20df-4da2-f25f-523f4da34906" + }, + "source": [ + "%pip install wikipedia" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: wikipedia in /usr/local/lib/python3.7/dist-packages (1.4.0)\n", + "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.7/dist-packages (from wikipedia) (4.6.3)\n", + "Requirement already satisfied: requests<3.0.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from wikipedia) (2.23.0)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2.10)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (3.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2021.5.30)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (1.24.3)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5fhPyHYfRtiB" + }, + "source": [ + "# use text-wrapping in output\n", + "from IPython.display import HTML, display\n", + "\n", + "def set_css():\n", + " display(HTML('''<style>pre {white-space: pre-wrap;}</style>'''))\n", + "\n", + "get_ipython().events.register('pre_run_cell', set_css)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 52 + }, + "id": 
"cqQ_dLFZMWCK", + "outputId": "97ea35bc-e42a-4631-943b-3c7fb5790e22" + }, + "source": [ + "import wikipedia\n", + "from collections import OrderedDict\n", + "\n", + "result = wikipedia.search(\"messi ronaldo rivalry\")\n", + "print(result)" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "['Messi–Ronaldo rivalry', 'Cristiano Ronaldo', 'Lionel Messi', 'List of career achievements by Cristiano Ronaldo', 'Ronaldo (Brazilian footballer)', 'List of career achievements by Lionel Messi', \"2012 FIFA Ballon d'Or\", 'New Maradona', \"Ballon d'Or\", \"Ballon d'Or Dream Team\"]\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "3p72PU3rOnQz", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 17 + }, + "outputId": "45c88a8c-5e24-4c91-fa93-f57f3c540f24" + }, + "source": [ + "page = wikipedia.page(result[0])\n", + "content = page.content" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JwIEiX-DYw3N" + }, + "source": [ + "#####**Convert the article into a dictionary**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "dV4uufc7TUM9", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 17 + }, + "outputId": "0eb26f0d-fe55-4db1-b484-8531b74f2fde" + }, + "source": [ + "content_list = content.split('\\n\\n\\n')\n", + "\n", + "complex_sentences_dict = OrderedDict()\n", + "complex_sentences_dict['== Introduction =='] = content_list.pop(0)\n", + "\n", + "for section in content_list:\n", + " section_split = section.split('\\n')\n", + "\n", + " title = section_split.pop(0)\n", + " content = ''\n", + " for para in section_split:\n", + " if para != '':\n", + " content += para\n", + " \n", + " if len(content) > 200:\n", + " complex_sentences_dict[title] = content" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "id": "cq4ePpRhWYii", + "outputId": "f2c9d9dc-cd13-4922-9337-a70644f2974d" + }, + "source": [ + "# print the wiki article\n", + "for title, content in complex_sentences_dict.items():\n", + " print(title, content, '\\n---\\n', sep='\\n')" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "== Introduction ==\n", + "The Messi–Ronaldo rivalry is an association football rivalry between Argentine forward Lionel Messi and Portuguese forward Cristiano Ronaldo. Having won a combined eleven Ballon d'Or/FIFA Ballon d'Or awards (six for Messi and five for Ronaldo) and ten European Golden Shoe awards (six for Messi and four for Ronaldo), both are widely regarded as two of the greatest players of all time. 
They are two of the most decorated football players ever, having won a combined 66 trophies (Ronaldo 32, Messi 34) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season. They are amongst the 51 players in the history of the sport to score over 500 career goals, with both having scored over 700 goals each in their careers for club and country. Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73).Journalists and pundits regularly argue the individual merits of both players in an attempt to establish who they believe is the best attacker in modern football or ever. Regardless of preference, football critics generally agree that both are the greatest players of their generation, outperforming their peers numerically, largely by a significant margin. Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide. It has been compared to past global sports rivalries such as the Magic Johnson-Larry Bird rivalry in basketball, the Muhammad Ali–Joe Frazier rivalry in boxing, the Roger Federer–Rafael Nadal rivalry and the Björn Borg–John McEnroe rivalry in tennis, and the Alain Prost–Ayrton Senna rivalry in Formula One motor racing. Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Ronaldo is often described as someone of temperamental character while Messi is considered to have a more reserved character.At club level, Messi and Ronaldo represented rivals FC Barcelona and Real Madrid CF for nearly a decade. The two players faced each other at least twice every season in the world's most popular regular-season club game, El Clásico (among the most viewed annual sporting events), from Ronaldo's arrival at Madrid in 2009 until his transfer to Italian club Juventus F.C. in 2018. Off the field, they are also the face of two rival sportswear manufacturers, Messi of Adidas and Ronaldo of Nike, which are also the kit suppliers of their national teams and the opposite for their clubs. The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars in combined income from salaries, bonuses and off-field earnings. In 2018, Messi topped Ronaldo on the Forbes' list of highest-paid sportspeople, earning $111 million, with Ronaldo next on the list at $108 million.\n", + "\n", + "---\n", + "\n", + "== History ==\n", + "In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or, an award rewarded to the player voted as the best in the world by an international panel of sports journalists; and the FIFA World Player of the Year, an award voted for by coaches and captains of international teams. In an interview that year, Messi was quoted as saying that \"Cristiano Ronaldo is an extraordinary player and it would be brilliant to be in the same team as him.\"They first played in a game against each other when Manchester United were drawn to play Barcelona in the 2007–08 UEFA Champions League semi-finals and were immediately pitted as major rivals. Ronaldo missed a penalty in the first leg, but United eventually advanced to the final via a Paul Scholes goal. 
At the end of the year, Ronaldo was awarded the Ballon d'Or and vowed that he would win the award again.The 2009 UEFA Champions League Final was contested between Manchester United and Barcelona on 27 May 2009 at the Stadio Olimpico in Rome, Italy. The match, described as a \"dream clash\", was again hyped as the latest battle between the two, this time to settle who was the best player in the world; Ronaldo claimed he was the better of the two, while Messi's club-mate Xavi sided with him. Manchester United manager Alex Ferguson was more diplomatic, praising both players as being amongst the world's elite talents. Messi, playing in a central role he was unaccustomed to so he would avoid a direct battle with Manchester United left-back Patrice Evra, scored Barcelona's second in a 2–0 victory with a header in the 70th minute. Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol.From 2009 to 2018, the two played against each other at least twice per season during El Clásico matches but also met many other times competitions such as the Copa del Rey, the Supercopa de España, and, a two-legged Champions League semifinal in 2011. Messi won four consecutive Ballon d'Or awards (2009 to 2012), with a fifth coming in 2015, while Ronaldo had equalled Messi's total of five with wins in 2013, 2014, 2016, and 2017. In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. In total, Messi and Ronaldo reached the podium a record twelve times each. Messi won two Champions League while Ronaldo was playing for Real Madrid, with the latter securing four out of five titles between 2014 and 2018. Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes thrice each (2011, 2014 and 2015). From 2009 to 2018, Ronaldo was the Champions League top scorer on six occasions, with Messi achieving this feat four times (including in 2015 when the pair finished joint-top).\n", + "\n", + "---\n", + "\n", + "== Relationship between Messi and Ronaldo ==\n", + "In a 2015 interview, Ronaldo commented on the rivalry by saying: \"I think we push each other sometimes in the competition, this is why the competition is so high.\" Alex Ferguson, Ronaldo's manager during his time at Manchester United, opined: \"I don't think the rivalry against each other bothers them. I think they have their own personal pride in terms of wanting to be the best.\" Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\"It is widely argued and documented that there is an atmosphere of competition between the duo, with Guillem Balagué claiming in the book Ronaldo that he refers to his Argentine counterpart as a \"motherfucker\" behind his back, and Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play. 
In response to claims that he and Messi do not get on well on a personal level, Ronaldo commented: \"We don't have a relationship outside the world of football, just as we don't with a lot of other players\", before adding that in years to come he hopes they can laugh about it together, stating: \"We have to look on this rivalry with a positive spirit, because it's a good thing.\" On 13 November 2014, Ronaldo also threatened to take legal action over the remarks made by Balagué. After Ronaldo's departure from Real Madrid to Juventus, Messi admitted to missing him, saying: \"I miss Cristiano. Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\"\n", + "\n", + "---\n", + "\n", + "== Awards and records ==\n", + "Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\", and \"remarkable\", respectively. The rivalry itself has been described as one about records and reputation of the players, rather than one based in loathing.Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, with comfortably over 1,000 totals; meanwhile, Ronaldo places second for goals scored and in third for assists provided, whilst being the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games. Messi also holds both the most goals in a season (73), and second most assists in a season (34), just one behind midfielder Juan Mata's record. He is the outright holder of most goal contributions in a season, with 107 in 2011–12. The two had broken each other's record over the course of 2015 after Messi surpassed the previous record holder Raúl in November 2014. Ronaldo opened a gap in the 2015–16 season when he became the first player to score double figures in the group stage of the Champions League, setting the record at 11 goals. They are the first two players to score 100 goals in UEFA Champions League history.They dominated the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; in 2018, their roughly decade-long triumph was ended by Luka Modrić, seen as \"the end of an era.\" In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. Nobody has the right to compare themselves to them.\" Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four.Collectively, Ronaldo has won 32 major trophies including seven league titles and five Champions Leagues, and guided Portugal to their first trophies UEFA Euro 2016 and the 2019 UEFA Nations League. 
He has also won four national cups, two league cups, six national super cups, two European Super Cups, and four FIFA Club World Cups.Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. As for youth competitions, Messi won a gold medal at the 2008 Olympic Games held in China, and he had previously won the FIFA U-20 World Cup in 2005, being elected best player and top scorer of the tournament. Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named joint-best player and top scorer.\n", + "\n", + "---\n", + "\n", + "== Head-to-head ==\n", + "In past years, it had been said that the rivalry between Real Madrid and Barcelona has been \"encapsulated\" by the individual rivalry between Ronaldo and Messi, with there never being a goalless El Clásico between the two.\n", + "\n", + "---\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "6zZmvSG-a-xt" + }, + "source": [ + "---\n", + "### Simplify the article" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 52 + }, + "id": "AC70dZ6BwGIE", + "outputId": "2ef81f95-f4ee-41d7-80e5-0d6624a62e8b" + }, + "source": [ + "import nltk.data\n", + "nltk.download('punkt')\n", + "tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')\n", + "\n", + "sen_length = []\n", + "complex_sen_list = []\n", + "\n", + "with open('complex.en', 'w') as fo:\n", + " for title, content in complex_sentences_dict.items():\n", + " content = tokenizer.tokenize(content)\n", + " complex_sen_list = complex_sen_list + content\n", + " sen_length.append(len(content))\n", + "\n", + " content = '\\n'.join(content)\n", + " fo.write(content + '\\n')\n", + "\n", + "complex_text = ' '.join(complex_sen_list)" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "[nltk_data] Downloading package punkt to /root/nltk_data...\n", + "[nltk_data] Package punkt is already up-to-date!\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 691 + }, + "id": "wHYOYL4NwGIG", + "outputId": "6ee79e6f-f36b-438d-9ad3-f9d868d2d848" + }, + "source": [ + "%run scripts/simplify.py complex.en --model-name muss_en_wikilarge_mined" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "INFO:fairseq_cli.generate:Namespace(all_gather_list_size=16384, batch_size=None, batch_size_valid=None, beam=5, bf16=False, bpe=None, broadcast_buffers=False, bucket_cap_mb=25, checkpoint_shard_count=1, checkpoint_suffix='', constraints=None, cpu=False, criterion='cross_entropy', curriculum=0, data='/tmp/tmpv1u5fmdc', data_buffer_size=10, dataset_impl='raw', ddp_backend='c10d', decoding_format=None, device_id=0, disable_validation=False, distributed_backend='nccl', distributed_init_method=None, distributed_no_spawn=False, distributed_port=-1, 
distributed_rank=0, distributed_world_size=1, distributed_wrapper='DDP', diverse_beam_groups=-1, diverse_beam_strength=0.5, diversity_rate=-1.0, empty_cache_freq=0, eval_bleu=False, eval_bleu_args=None, eval_bleu_detok='space', eval_bleu_detok_args=None, eval_bleu_print_samples=False, eval_bleu_remove_bpe=None, eval_tokenized_bleu=False, fast_stat_sync=False, find_unused_parameters=False, fix_batches_to_gpus=False, fixed_validation_seed=None, force_anneal=None, fp16=False, fp16_init_scale=128, fp16_no_flatten_grads=False, fp16_scale_tolerance=0.0, fp16_scale_window=None, gen_subset='tmp', iter_decode_eos_penalty=0.0, iter_decode_force_max_iter=False, iter_decode_max_iter=10, iter_decode_with_beam=1, iter_decode_with_external_reranker=False, left_pad_source='True', left_pad_target='False', lenpen=1.0, lm_path=None, lm_weight=0.0, load_alignments=False, localsgd_frequency=3, log_format=None, log_interval=100, lr_scheduler='fixed', lr_shrink=0.1, match_source_len=False, max_len_a=0, max_len_b=200, max_source_positions=1024, max_target_positions=1024, max_tokens=8000, max_tokens_valid=8000, memory_efficient_bf16=False, memory_efficient_fp16=False, min_len=1, min_loss_scale=0.0001, model_overrides=\"{'encoder_embed_path': None, 'decoder_embed_path': None}\", model_parallel_size=1, nbest=1, no_beamable_mm=False, no_early_stop=False, no_progress_bar=False, no_repeat_ngram_size=0, no_seed_provided=False, nprocs_per_node=1, num_batch_buckets=0, num_shards=1, num_workers=1, optimizer=None, path='/content/muss/resources/models/muss_en_wikilarge_mined/model.pt', pipeline_balance=None, pipeline_checkpoint='never', pipeline_chunks=0, pipeline_decoder_balance=None, pipeline_decoder_devices=None, pipeline_devices=None, pipeline_encoder_balance=None, pipeline_encoder_devices=None, pipeline_model_parallel=False, prefix_size=0, print_alignment=False, print_step=False, profile=False, quantization_config_path=None, quiet=False, remove_bpe=None, replace_unk=None, required_batch_size_multiple=8, required_seq_len_multiple=1, results_path=None, retain_dropout=False, retain_dropout_modules=None, retain_iter_history=False, sacrebleu=False, sampling=False, sampling_topk=-1, sampling_topp=-1.0, score_reference=False, scoring='bleu', seed=1, shard_id=0, skip_invalid_size_inputs_valid_test=True, slowmo_algorithm='LocalSGD', slowmo_momentum=None, source_lang=None, target_lang=None, task='translation', temperature=1.0, tensorboard_logdir=None, threshold_loss_scale=None, tokenizer=None, tpu=False, train_subset='train', truncate_source=False, unkpen=0, unnormalized=False, upsample_primary=1, user_dir=None, valid_subset='valid', validate_after_updates=0, validate_interval=1, validate_interval_updates=0, warmup_updates=0, zero_sharding='none')\n", + "INFO:fairseq.tasks.translation:[complex] dictionary: 50264 types\n", + "INFO:fairseq.tasks.translation:[simple] dictionary: 50264 types\n", + "INFO:fairseq.data.data_utils:loaded 53 examples from: /tmp/tmpv1u5fmdc/tmp.complex-simple.complex\n", + "INFO:fairseq.data.data_utils:loaded 53 examples from: /tmp/tmpv1u5fmdc/tmp.complex-simple.simple\n", + "INFO:fairseq.tasks.translation:/tmp/tmpv1u5fmdc tmp complex-simple 53 examples\n", + "INFO:fairseq_cli.generate:loading model(s) from /content/muss/resources/models/muss_en_wikilarge_mined/model.pt\n", + "/usr/local/lib/python3.7/dist-packages/torch/_tensor.py:575: UserWarning: floor_divide is deprecated, and will be removed in a future version of pytorch. It currently rounds toward 0 (like the 'trunc' function NOT 'floor'). 
This results in incorrect rounding for negative values.\n", + "To keep the current behavior, use torch.div(a, b, rounding_mode='trunc'), or for actual floor division, use torch.div(a, b, rounding_mode='floor'). (Triggered internally at /pytorch/aten/src/ATen/native/BinaryOps.cpp:467.)\n", + " return torch.floor_divide(self, other)\n", + "INFO:fairseq_cli.generate:NOTE: hypothesis and token scores are output in base 2\n", + "INFO:fairseq_cli.generate:Translated 53 sentences (2218 tokens) in 6.6s (8.04 sentences/s, 336.45 tokens/s)\n" + ], + "name": "stderr" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "BwjEbOHtnGZm", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 17 + }, + "outputId": "404ba4cd-ed66-4704-a4e4-ebb45fc23a79" + }, + "source": [ + "import pickle\n", + "\n", + "with open(\"simple.pkl\", \"rb\") as fo:\n", + " simple_sentences = pickle.load(fo)\n", + "\n", + "simple_text = ' '.join(simple_sentences)" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "2sK8KOsW_yje" + }, + "source": [ + "#####**Complex-Simple pairs**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "IjhMeYI_wGIH", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "3e33546f-a5f7-439f-94ef-bc04f48655c2" + }, + "source": [ + "for simp, comp in zip(simple_sentences, complex_sen_list):\n", + " print(f'Complex== {comp} \\nSimple== {simp}\\n')" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Complex: The Messi–Ronaldo rivalry is an association football rivalry between Argentine forward Lionel Messi and Portuguese forward Cristiano Ronaldo. \n", + "Simple: The Messi-Ronaldo rivalry is a football rivalry between two very good football players: Cristiano Ronaldo and Lionel Messi.\n", + "\n", + "Complex: Having won a combined eleven Ballon d'Or/FIFA Ballon d'Or awards (six for Messi and five for Ronaldo) and ten European Golden Shoe awards (six for Messi and four for Ronaldo), both are widely regarded as two of the greatest players of all time. \n", + "Simple: Many people think that both Messi and Ronaldo are the best players in the world today. Both have won the Ballon d'Or many times, six for Messi and five for Ronaldo, as well as many other awards, such as the European Golden Shoe.\n", + "\n", + "Complex: They are two of the most decorated football players ever, having won a combined 66 trophies (Ronaldo 32, Messi 34) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season. \n", + "Simple: They are two of the most successful football players ever, having won a combined 66 trophies. Messi has won 34 trophies in his senior career, and Ronaldo has won 32 trophies in his senior career.\n", + "\n", + "Complex: They are amongst the 51 players in the history of the sport to score over 500 career goals, with both having scored over 700 goals each in their careers for club and country. \n", + "Simple: They are the only two players in the history of the sport to score 500 or more goals in a single season. 
They have both scored over 700 goals in their careers.\n", + "\n", + "Complex: Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73).Journalists and pundits regularly argue the individual merits of both players in an attempt to establish who they believe is the best attacker in modern football or ever. \n", + "Simple: Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73). Many people think that Ronaldo is the best player in the world today, while some people think that Messi is the best player ever.\n", + "\n", + "Complex: Regardless of preference, football critics generally agree that both are the greatest players of their generation, outperforming their peers numerically, largely by a significant margin. \n", + "Simple: No matter what people think, football people generally agree that both are the greatest players of their time. Many people think they are the best players of their time.\n", + "\n", + "Complex: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide. \n", + "Simple: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide.\n", + "\n", + "Complex: It has been compared to past global sports rivalries such as the Magic Johnson-Larry Bird rivalry in basketball, the Muhammad Ali–Joe Frazier rivalry in boxing, the Roger Federer–Rafael Nadal rivalry and the Björn Borg–John McEnroe rivalry in tennis, and the Alain Prost–Ayrton Senna rivalry in Formula One motor racing. \n", + "Simple: It has been compared to past global sports rivalries such as the Muhammad Ali-Joe Frazier rivalry in boxing, the Alain Prost-Ayrton Senna rivalry in Formula One motor racing, the Björn Borg-John McEnroe rivalry in tennis, and the Roger Federer-Rafael Nadal rivalry in tennis.\n", + "\n", + "Complex: Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Ronaldo is often described as someone of temperamental character while Messi is considered to have a more reserved character.At club level, Messi and Ronaldo represented rivals FC Barcelona and Real Madrid CF for nearly a decade. \n", + "Simple: Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Messi is often described as a player with a strong personality, while Ronaldo is considered to have a more reserved character.\n", + "\n", + "Complex: The two players faced each other at least twice every season in the world's most popular regular-season club game, El Clásico (among the most viewed annual sporting events), from Ronaldo's arrival at Madrid in 2009 until his transfer to Italian club Juventus F.C. \n", + "Simple: The two players play against each other at least twice every season in the world's most popular regular-season club game, El Clásico. 
Since Ronaldo joined Madrid in 2009, they have played against each other at least twice every season.\n", + "\n", + "Complex: in 2018. \n", + "Simple: in 2018.\n", + "\n", + "Complex: Off the field, they are also the face of two rival sportswear manufacturers, Messi of Adidas and Ronaldo of Nike, which are also the kit suppliers of their national teams and the opposite for their clubs. \n", + "Simple: Off the field, they are also the face of two rival sportswear manufacturers, Adidas and Nike. Adidas and Nike both supply the national teams of both Messi and Ronaldo.\n", + "\n", + "Complex: The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars in combined income from salaries, bonuses and off-field earnings. \n", + "Simple: The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars. They both make a lot of money from their jobs.\n", + "\n", + "Complex: In 2018, Messi topped Ronaldo on the Forbes' list of highest-paid sportspeople, earning $111 million, with Ronaldo next on the list at $108 million. \n", + "Simple: In 2018, Messi made $111 million on the Forbes list of the world's most paid sportspeople. Ronaldo made $108 million on the list.\n", + "\n", + "Complex: In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or, an award rewarded to the player voted as the best in the world by an international panel of sports journalists; and the FIFA World Player of the Year, an award voted for by coaches and captains of international teams. \n", + "Simple: In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or and the FIFA World Player of the Year. The Ballon d'Or is an award given to the best player in the world by an international group of sports journalists.\n", + "\n", + "Complex: In an interview that year, Messi was quoted as saying that \"Cristiano Ronaldo is an extraordinary player and it would be brilliant to be in the same team as him. \n", + "Simple: In an interview that year, Messi said that he would like to play with Ronaldo. He said that \"Cristiano Ronaldo is an extraordinary player.\n", + "\n", + "Complex: \"They first played in a game against each other when Manchester United were drawn to play Barcelona in the 2007–08 UEFA Champions League semi-finals and were immediately pitted as major rivals. \n", + "Simple: They first played against each other when Manchester United were playing Barcelona in the UEFA Champions League in 2007. They played against each other for the first time in this game.\n", + "\n", + "Complex: Ronaldo missed a penalty in the first leg, but United eventually advanced to the final via a Paul Scholes goal. \n", + "Simple: In the first leg, Ronaldo missed a penalty. In the end, Paul Scholes scored United's goal.\n", + "\n", + "Complex: At the end of the year, Ronaldo was awarded the Ballon d'Or and vowed that he would win the award again.The 2009 UEFA Champions League Final was contested between Manchester United and Barcelona on 27 May 2009 at the Stadio Olimpico in Rome, Italy. \n", + "Simple: At the end of the year, Ronaldo was awarded the Ballon d'Or and said that he would win the award again. 
The 2009 UEFA Champions League Final was played on May 27, 2009 at the Stadio Olimpico in Rome, Italy.\n", + "\n", + "Complex: The match, described as a \"dream clash\", was again hyped as the latest battle between the two, this time to settle who was the best player in the world; Ronaldo claimed he was the better of the two, while Messi's club-mate Xavi sided with him. \n", + "Simple: The match, described as a \"dream clash\", was again hyped as the latest battle between the two, with Messi's club-mate Xavi saying that Ronaldo was the better player, while Ronaldo said that he was the best player in the world.\n", + "\n", + "Complex: Manchester United manager Alex Ferguson was more diplomatic, praising both players as being amongst the world's elite talents. \n", + "Simple: Manchester United manager Alex Ferguson thought that both players were very good. He said that they were the best in the world.\n", + "\n", + "Complex: Messi, playing in a central role he was unaccustomed to so he would avoid a direct battle with Manchester United left-back Patrice Evra, scored Barcelona's second in a 2–0 victory with a header in the 70th minute. \n", + "Simple: Messi, playing in a central role so he would not have to fight with Manchester United left-back Patrice Evra, scored Barcelona's second goal in a 2-0 victory. He scored the goal in the 70th minute.\n", + "\n", + "Complex: Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol.From 2009 to 2018, the two played against each other at least twice per season during El Clásico matches but also met many other times competitions such as the Copa del Rey, the Supercopa de España, and, a two-legged Champions League semifinal in 2011. \n", + "Simple: Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol. The two have played against each other at least twice a season since the beginning of the 2009\\/10 season, including a two-legged Champions League semifinal in 2011.\n", + "\n", + "Complex: Messi won four consecutive Ballon d'Or awards (2009 to 2012), with a fifth coming in 2015, while Ronaldo had equalled Messi's total of five with wins in 2013, 2014, 2016, and 2017. \n", + "Simple: Messi has won the Ballon d'Or four times in a row (2009 to 2012), while Ronaldo has won it five times. Ronaldo won the award in 2013, 2014, 2016, and 2017.\n", + "\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. \n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third.\n", + "\n", + "Complex: In total, Messi and Ronaldo reached the podium a record twelve times each. \n", + "Simple: In total, Messi and Ronaldo have both won the World Cup twelve times.\n", + "\n", + "Complex: Messi won two Champions League while Ronaldo was playing for Real Madrid, with the latter securing four out of five titles between 2014 and 2018. \n", + "Simple: While Ronaldo was playing for Real Madrid, Messi won two Champions League games. 
Real Madrid have won four out of five titles since 2014.\n", + "\n", + "Complex: Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes thrice each (2011, 2014 and 2015). \n", + "Simple: During this time, Messi has won five Pichichi trophies and three European Golden Shoe awards. Ronaldo has won these awards three times, once each in 2011, 2014 and 2015.\n", + "\n", + "Complex: From 2009 to 2018, Ronaldo was the Champions League top scorer on six occasions, with Messi achieving this feat four times (including in 2015 when the pair finished joint-top). \n", + "Simple: From 2009 to 2018, Ronaldo has scored more goals in the Champions League than Messi four times. In 2015, the two players finished as the top scorers together.\n", + "\n", + "Complex: In a 2015 interview, Ronaldo commented on the rivalry by saying: \"I think we push each other sometimes in the competition, this is why the competition is so high.\" \n", + "Simple: In a 2015 interview, Ronaldo talked about the rivalry. He said: \"Sometimes we push each other a little bit, that's why the competition is so good.\n", + "\n", + "Complex: Alex Ferguson, Ronaldo's manager during his time at Manchester United, opined: \"I don't think the rivalry against each other bothers them. \n", + "Simple: Alex Ferguson, who managed Ronaldo when he was at Manchester United, said that he does not care about the other players.\n", + "\n", + "Complex: I think they have their own personal pride in terms of wanting to be the best.\" \n", + "Simple: I think they have their own personal pride. They want to be the best at it.\"\n", + "\n", + "Complex: Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano. \n", + "Simple: Messi has denied any rivalry, and blames the media for creating it. He said that \"only the media, the press, wants us to be at loggerheads (not playing against each other).\n", + "\n", + "Complex: \"It is widely argued and documented that there is an atmosphere of competition between the duo, with Guillem Balagué claiming in the book Ronaldo that he refers to his Argentine counterpart as a \"motherfucker\" behind his back, and Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play. \n", + "Simple: \"It is widely argued and documented that there is an atmosphere of competition between the duo, with Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play, and Guillem Balagué claiming in the book that he refers to his Argentine counterpart as a \"motherfucker\" behind his back.\n", + "\n", + "Complex: In response to claims that he and Messi do not get on well on a personal level, Ronaldo commented: \"We don't have a relationship outside the world of football, just as we don't with a lot of other players\", before adding that in years to come he hopes they can laugh about it together, stating: \"We have to look on this rivalry with a positive spirit, because it's a good thing.\" \n", + "Simple: He has said that he and Messi do not get on well on a personal level, and that they do not have a good relationship outside of football. 
He said that they do not have a good relationship with a lot of other players, and that in years to come he hopes they can laugh about it together.\n", + "\n", + "Complex: On 13 November 2014, Ronaldo also threatened to take legal action over the remarks made by Balagué. \n", + "Simple: On 13 November 2014, Ronaldo also said that he would take legal action against Balagué.\n", + "\n", + "Complex: After Ronaldo's departure from Real Madrid to Juventus, Messi admitted to missing him, saying: \"I miss Cristiano. \n", + "Simple: After Ronaldo left Real Madrid to join Juventus, Messi said that he \"missed\" him, \"I miss Cristiano\".\n", + "\n", + "Complex: Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" \n", + "Simple: It was a little hard for him to win trophies, but he gave La Liga importance.\"\n", + "\n", + "Complex: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\" \n", + "Simple: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\"\n", + "\n", + "Complex: Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\", and \"remarkable\", respectively. \n", + "Simple: Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken many goalscoring records for both club and country. People have said that they are both \"incredible\" and \"remarkable\" for having scored so many goals.\n", + "\n", + "Complex: The rivalry itself has been described as one about records and reputation of the players, rather than one based in loathing.Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, with comfortably over 1,000 totals; meanwhile, Ronaldo places second for goals scored and in third for assists provided, whilst being the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games. \n", + "Simple: Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, while being the UEFA Champions League all-time top goalscorer and assist provider. Ronaldo is the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games.\n", + "\n", + "Complex: Messi also holds both the most goals in a season (73), and second most assists in a season (34), just one behind midfielder Juan Mata's record. \n", + "Simple: Messi also holds the record for most goals in a season with 73. He also holds the record for most assists with 34.\n", + "\n", + "Complex: He is the outright holder of most goal contributions in a season, with 107 in 2011–12. 
\n", + "Simple: He has scored the most goals in a single season with 107 in the 2011 â \"12 season.\n", + "\n", + "Complex: The two had broken each other's record over the course of 2015 after Messi surpassed the previous record holder Raúl in November 2014. \n", + "Simple: The two had broken each other's record over the course of 2015. In November 2014, Messi beat the previous record holder.\n", + "\n", + "Complex: Ronaldo opened a gap in the 2015–16 season when he became the first player to score double figures in the group stage of the Champions League, setting the record at 11 goals. \n", + "Simple: In the 2015/16 season, he became the first player in history to score double figures in the Champions League group stage, setting the record at 11 goals.\n", + "\n", + "Complex: They are the first two players to score 100 goals in UEFA Champions League history.They dominated the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; in 2018, their roughly decade-long triumph was ended by Luka Modrić, seen as \"the end of an era.\" \n", + "Simple: They are the first two players to score 100 goals in UEFA Champions League history. They have won the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; Luka Modrić won the award for the first time in his career in 2018.\n", + "\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. \n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo.\n", + "\n", + "Complex: Nobody has the right to compare themselves to them.\" \n", + "Simple: No one should compare themselves to them.\"\n", + "\n", + "Complex: Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four.Collectively, Ronaldo has won 32 major trophies including seven league titles and five Champions Leagues, and guided Portugal to their first trophies UEFA Euro 2016 and the 2019 UEFA Nations League. \n", + "Simple: Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four. In that time, Messi has won seven league titles and five Champions Leagues, while Ronaldo has won seven league titles and five Champions Leagues.\n", + "\n", + "Complex: He has also won four national cups, two league cups, six national super cups, two European Super Cups, and four FIFA Club World Cups.Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. \n", + "Simple: Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. 
He has also won four national cups, two league cups, six national super cups, and four FIFA Club World Cups.\n", + "\n", + "Complex: As for youth competitions, Messi won a gold medal at the 2008 Olympic Games held in China, and he had previously won the FIFA U-20 World Cup in 2005, being elected best player and top scorer of the tournament. \n", + "Simple: In 2008, Messi won a gold medal at the Olympic Games held in China, and in 2005, he won the FIFA U-20 World Cup. Messi was the best player at the 2008 Olympic Games, and he won the FIFA U-20 World Cup.\n", + "\n", + "Complex: Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named joint-best player and top scorer. \n", + "Simple: In addition, Messi was runner-up at three Copa Américas and at the 2014 World Cup. At the 2021 Copa América, Messi was named joint-best player and top scorer, and he won his first major international trophy.\n", + "\n", + "Complex: In past years, it had been said that the rivalry between Real Madrid and Barcelona has been \"encapsulated\" by the individual rivalry between Ronaldo and Messi, with there never being a goalless El Clásico between the two. \n", + "Simple: In past years, it had been said that the rivalry between Real Madrid and Barcelona is the best in the world. This is because Messi and Ronaldo have always played against each other in El Clásico.\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7A_XJop0_71z" + }, + "source": [ + "#####**Simplified Wikipedia Article**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "Jms42MMFdZJj", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "b3a42f97-9fba-49d6-ab14-684bb2a5d041" + }, + "source": [ + "cur_para, cur_sen = 0, 0\n", + "simplified_article = page.original_title + '\\n\\n'\n", + "\n", + "for lth in sen_length:\n", + " simplified_article += (list(complex_sentences_dict.keys()))[cur_para] + '\\n\\n'\n", + " for i in range(lth):\n", + " simplified_article += simple_sentences[cur_sen+i] + ' '\n", + " simplified_article += '\\n\\n'\n", + "\n", + " cur_para += 1\n", + " cur_sen += lth\n", + "\n", + "print(simplified_article)" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Messi–Ronaldo rivalry\n", + "\n", + "== Introduction ==\n", + "\n", + "The Messi-Ronaldo rivalry is a football rivalry between two very good football players: Cristiano Ronaldo and Lionel Messi. Many people think that both Messi and Ronaldo are the best players in the world today. Both have won the Ballon d'Or many times, six for Messi and five for Ronaldo, as well as many other awards, such as the European Golden Shoe. They are two of the most successful football players ever, having won a combined 66 trophies. Messi has won 34 trophies in his senior career, and Ronaldo has won 32 trophies in his senior career. They are the only two players in the history of the sport to score 500 or more goals in a single season. They have both scored over 700 goals in their careers. Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73). 
Many people think that Ronaldo is the best player in the world today, while some people think that Messi is the best player ever. No matter what people think, football people generally agree that both are the greatest players of their time. Many people think they are the best players of their time. Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide. It has been compared to past global sports rivalries such as the Muhammad Ali-Joe Frazier rivalry in boxing, the Alain Prost-Ayrton Senna rivalry in Formula One motor racing, the Björn Borg-John McEnroe rivalry in tennis, and the Roger Federer-Rafael Nadal rivalry in tennis. Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Messi is often described as a player with a strong personality, while Ronaldo is considered to have a more reserved character. The two players play against each other at least twice every season in the world's most popular regular-season club game, El Clásico. Since Ronaldo joined Madrid in 2009, they have played against each other at least twice every season. in 2018. Off the field, they are also the face of two rival sportswear manufacturers, Adidas and Nike. Adidas and Nike both supply the national teams of both Messi and Ronaldo. The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars. They both make a lot of money from their jobs. In 2018, Messi made $111 million on the Forbes list of the world's most paid sportspeople. Ronaldo made $108 million on the list. \n", + "\n", + "== History ==\n", + "\n", + "In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or and the FIFA World Player of the Year. The Ballon d'Or is an award given to the best player in the world by an international group of sports journalists. In an interview that year, Messi said that he would like to play with Ronaldo. He said that \"Cristiano Ronaldo is an extraordinary player. They first played against each other when Manchester United were playing Barcelona in the UEFA Champions League in 2007. They played against each other for the first time in this game. In the first leg, Ronaldo missed a penalty. In the end, Paul Scholes scored United's goal. At the end of the year, Ronaldo was awarded the Ballon d'Or and said that he would win the award again. The 2009 UEFA Champions League Final was played on May 27, 2009 at the Stadio Olimpico in Rome, Italy. The match, described as a \"dream clash\", was again hyped as the latest battle between the two, with Messi's club-mate Xavi saying that Ronaldo was the better player, while Ronaldo said that he was the best player in the world. Manchester United manager Alex Ferguson thought that both players were very good. He said that they were the best in the world. Messi, playing in a central role so he would not have to fight with Manchester United left-back Patrice Evra, scored Barcelona's second goal in a 2-0 victory. He scored the goal in the 70th minute. Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol. 
The two have played against each other at least twice a season since the beginning of the 2009\\/10 season, including a two-legged Champions League semifinal in 2011. Messi has won the Ballon d'Or four times in a row (2009 to 2012), while Ronaldo has won it five times. Ronaldo won the award in 2013, 2014, 2016, and 2017. In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times. While Ronaldo was playing for Real Madrid, Messi won two Champions League games. Real Madrid have won four out of five titles since 2014. During this time, Messi has won five Pichichi trophies and three European Golden Shoe awards. Ronaldo has won these awards three times, once each in 2011, 2014 and 2015. From 2009 to 2018, Ronaldo has scored more goals in the Champions League than Messi four times. In 2015, the two players finished as the top scorers together. \n", + "\n", + "== Relationship between Messi and Ronaldo ==\n", + "\n", + "In a 2015 interview, Ronaldo talked about the rivalry. He said: \"Sometimes we push each other a little bit, that's why the competition is so good. Alex Ferguson, who managed Ronaldo when he was at Manchester United, said that he does not care about the other players. I think they have their own personal pride. They want to be the best at it.\" Messi has denied any rivalry, and blames the media for creating it. He said that \"only the media, the press, wants us to be at loggerheads (not playing against each other). \"It is widely argued and documented that there is an atmosphere of competition between the duo, with Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play, and Guillem Balagué claiming in the book that he refers to his Argentine counterpart as a \"motherfucker\" behind his back. He has said that he and Messi do not get on well on a personal level, and that they do not have a good relationship outside of football. He said that they do not have a good relationship with a lot of other players, and that in years to come he hopes they can laugh about it together. On 13 November 2014, Ronaldo also said that he would take legal action against Balagué. After Ronaldo left Real Madrid to join Juventus, Messi said that he \"missed\" him, \"I miss Cristiano\". It was a little hard for him to win trophies, but he gave La Liga importance.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" \n", + "\n", + "== Awards and records ==\n", + "\n", + "Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken many goalscoring records for both club and country. People have said that they are both \"incredible\" and \"remarkable\" for having scored so many goals. Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, while being the UEFA Champions League all-time top goalscorer and assist provider. Ronaldo is the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games. 
Messi also holds the record for most goals in a season with 73. He also holds the record for most assists with 34. He has scored the most goals in a single season with 107 in the 2011–12 season. The two had broken each other's record over the course of 2015. In November 2014, Messi beat the previous record holder. In the 2015/16 season, he became the first player in history to score double figures in the Champions League group stage, setting the record at 11 goals. They are the first two players to score 100 goals in UEFA Champions League history. They have won the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; Luka Modrić won the award for the first time in his career in 2018. In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\" Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four. In that time, Messi has won seven league titles and five Champions Leagues, while Ronaldo has won seven league titles and five Champions Leagues. Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. He has also won four national cups, two league cups, six national super cups, and four FIFA Club World Cups. In 2008, Messi won a gold medal at the Olympic Games held in China, and in 2005, he won the FIFA U-20 World Cup. Messi was the best player at the 2008 Olympic Games, and he won the FIFA U-20 World Cup. In addition, Messi was runner-up at three Copa Américas and at the 2014 World Cup. At the 2021 Copa América, Messi was named joint-best player and top scorer, and he won his first major international trophy. \n", + "\n", + "== Head-to-head ==\n", + "\n", + "In past years, it had been said that the rivalry between Real Madrid and Barcelona is the best in the world. This is because Messi and Ronaldo have always played against each other in El Clásico.
\n", + "\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0-Xub2u-AZwW" + }, + "source": [ + "#####**Simplified + Summarized Article**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "7o7ZRZFsAY8p", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 543, + "referenced_widgets": [ + "c4768b71c55445f092c33d0d9b197834", + "77418127c4654006a6610af8722b4f98", + "977ad2096048427ab9d3c7c567d311f3", + "fcb0e031854446af94b2f29bd1d6710d", + "ccf5379321ec4eeeb2ccf0a014737889", + "85d2c4b4c4a14bcaabf06c2899acc41e", + "9cf7a6867acb4d3a93611932b9704d4e", + "5af848fb9265434c93693ae6dd7e5ac1", + "3a59e8f2a68a406fba8a15a8a54e62a3", + "70d73496167a433ca9e142e28fbb7596", + "0ef209dc3b3d454fae7325d0ddff7240", + "f29ee09134c24e63bb9c6a1ea65f3e6e", + "acf2fbcdc6dd4b1ba5de8908eac46199", + "1c3db3e73d924a6990cd10ebfc2665d2", + "a4a56f6446074adfa1b11bbcb34e3d22", + "f7dc8ad7682f43e4a8cc5f3c3442cb01", + "4e8374fa45104f0eb48209c3e573cec5", + "64c4c18fcf9d41c4a0854d95bff4ebef", + "ea74a17e91234cb1a803d9de41384ca4", + "0c8133e3342b4f649cfba7d8e3795232", + "cf9d482e115c48cf861c22c88201b238", + "11155ce2cd7141e394fd874e213efec7", + "7e961ac3509f4628bdfdbc7fc95d74de", + "61a80c0b95264b8aa78518d988419ae4", + "fff9f1c3802b477195c738a86e4afd48", + "c2cab65440a742cf8aa268fcb839fd95", + "7ea8106314e94965ab142c5fe945dfad", + "eb31c9e0621d4f029ff12b3b085b4adb", + "24ea13fff6084c41a97b191e38c4a936", + "95e097a72dd9430fa4990d8125f3aaf7", + "c1beffc7e319462194f55af137816064", + "783c049c65db4d958dd770c828869968", + "ee16cce8e6444c138326ce2cbc7f4431", + "a02e3162567847668f976f1e865506fd", + "65c2bacb18b24862955d1292704caa83", + "31a297d11a4743c5a07848d55194213e", + "c395b5aaed7348119db6d55d03118dd2", + "6b88509078be4a7fbd565dd772144ea1", + "53054592ddfe40d3b18546ce3ef29eb1", + "b5cad8be41d34a33b6455763477c5afc", + "f26727ff8e564530947c62a8d54aa2db", + "4d0999f7d88b4405b97e3a5bba72fe80", + "d1556920ef6f474a975a6672ba311b49", + "dbbed27ac93a4e79a06bad53e775484e", + "d782e1075f2b47f78d43173231f78e8e", + "da051dda894240e7ada9b38a2ee83391", + "333a987a9a9c44d38d597a9605f9a228", + "0bab7b0fb741464eba9c3c17449ce7d3", + "213a2195a4f04c74973ef8961c9b45fe", + "4d4a4b87e88d475b971a8a1c2e517a14", + "a519ea9882e54227848a48fe085c8a91", + "98d396a455b54f24943ff39d82373290", + "48d0ce68c7674e28b14e7791e9a9dc75", + "01b7da4567b047ee9f4df9ec405aa46b", + "21321dd78f5047bb913ca765a437433e" + ] + }, + "outputId": "b2a5bcff-f2ac-4e48-fef5-30f6841626f4" + }, + "source": [ + "from transformers import pipeline\n", + "summarizer = pipeline('summarization')" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "INFO:filelock:Lock 140182548512848 acquired on /root/.cache/huggingface/transformers/adac95cf641be69365b3dd7fe00d4114b3c7c77fb0572931db31a92d4995053b.a50597c2c8b540e8d07e03ca4d58bf615a365f134fb10ca988f4f67881789178.lock\n" + ], + "name": "stderr" + }, + { + "output_type": "display_data", + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "c4768b71c55445f092c33d0d9b197834", + "version_minor": 0, + "version_major": 2 + }, + "text/plain": [ + "Downloading: 0%| | 0.00/1.80k [00:00pre {white-space: pre-wrap;}" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Messi–Ronaldo rivalry\n", + "\n", + " The 
Messi-Ronaldo rivalry is a football rivalry between two very good football players . Many people think that both Messi and Ronaldo are the best players in the world today . Messi has won 34 trophies in his senior career, while Ronaldo has won 32 . The two players play against each other at least twice every season in El Clásico . The rivalry has been compared to past global sports rivalries such as the Muhammad Ali-Joe Frazier rivalry in boxing and the Alain Prost-Ayrton Senna rivalry in Formula One . Messi and Ronaldo finished as runners-up to Kaká in both the Ballon d'Or and FIFA World Player of the Year in 2007 . The two have played against each other at least twice a season since the beginning of the 2009\\/10 season, including a two-legged Champions League semifinal in 2011 . Messi has won the award four times in a row (2009 to 2012), while Ronaldo has won it five times . In 2018, Messi made $111 million on the Forbes list of the world's most paid sportspeople . In 2019, Messi took the lead again by earning Cristiano Ronaldo has scored more goals in the Champions League than Lionel Messi four times from 2009 to 2018 . In 2015, the two players finished as the top scorers together . Messi has denied any rivalry, and blames the media for creating it . In a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" The pair have dominated awards ceremonies and broken many goalscoring records for both club and country . \n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "YwxHQpD94hOI" + }, + "source": [ + "---\n", + "### Readability score calculation" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "maHGOcTw5KRm", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 190 + }, + "outputId": "0c5f1489-46c8-4088-f3bb-92b65b4c6f6c" + }, + "source": [ + "!pip install textstat" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Collecting textstat\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/ce/42/3e3691ff23d2f3e30ef18bd382d9450e992e2da7e01ca33d392b473eba05/textstat-0.7.1-py3-none-any.whl (99kB)\n", + "\r\u001b[K |███▎ | 10kB 21.6MB/s eta 0:00:01\r\u001b[K |██████▋ | 20kB 19.5MB/s eta 0:00:01\r\u001b[K |█████████▉ | 30kB 16.7MB/s eta 0:00:01\r\u001b[K |█████████████▏ | 40kB 15.1MB/s eta 0:00:01\r\u001b[K |████████████████▍ | 51kB 7.8MB/s eta 0:00:01\r\u001b[K |███████████████████▊ | 61kB 9.1MB/s eta 0:00:01\r\u001b[K |███████████████████████ | 71kB 9.5MB/s eta 0:00:01\r\u001b[K |██████████████████████████▎ | 81kB 9.8MB/s eta 0:00:01\r\u001b[K |█████████████████████████████▌ | 92kB 10.1MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 102kB 6.7MB/s \n", + "\u001b[?25hCollecting pyphen\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/77/95/b98d82fbfcd33f34a12d3d1eeb127e57d9dc5a272b6f08f4e889766d8b3a/pyphen-0.11.0-py3-none-any.whl (2.0MB)\n", + "\u001b[K |████████████████████████████████| 2.0MB 34.0MB/s \n", + "\u001b[?25hInstalling collected packages: pyphen, textstat\n", + "Successfully installed pyphen-0.11.0 textstat-0.7.1\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "MS0Bro8U5Dup", + "colab": { + 
"base_uri": "https://localhost:8080/", + "height": 415 + }, + "outputId": "da0aeb5e-1904-46b2-ffa7-38722149092c" + }, + "source": [ + "from textstat import smog_index, flesch_reading_ease, flesch_kincaid_grade, coleman_liau_index \n", + "from textstat import automated_readability_index, dale_chall_readability_score, gunning_fog\n", + "\n", + "print('Readability Index : Complex - Simple\\n')\n", + "\n", + "print('Flesch Reading Ease(+) : ', flesch_reading_ease(complex_text), \n", + " flesch_reading_ease(simple_text))\n", + "print('Flesch Kincaid Grade(-) : ', flesch_kincaid_grade(complex_text), flesch_kincaid_grade(simple_text))\n", + "print('Coleman Liau Index(-) : ', coleman_liau_index(complex_text), coleman_liau_index(simple_text))\n", + "print('Automated Readability Index(-) : ', automated_readability_index(complex_text),\n", + " automated_readability_index(simple_text))\n", + "print('Gunning Fog(-) : ', gunning_fog(complex_text), gunning_fog(simple_text))\n", + "print('Smog Index(-) : ', smog_index(complex_text), smog_index(simple_text))\n", + "print('Dale-Chall Readability Score(-) : ', dale_chall_readability_score(complex_text), \n", + " dale_chall_readability_score(simple_text))\n", + "\n", + "print('\\n---\\n\\n(+) : higher => better;\\n\\t90-100 : Very Easy\\n\\t80-90 : Easy'\n", + " '\\n\\t70-80 : Fairly Easy\\n\\t60-70 : Standard\\n\\t50-60 : Fairly Difficult'\n", + " '\\n\\t30-50 : Difficult\\n\\t0-30 : Very Confusing\\n')\n", + "print('(-) : lower => better;\\nThis is a grade formula in that a '\n", + " 'score of 9.3 means that a ninth grader would be able to read the document.')" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Readability Index : Complex - Simple\n", + "\n", + "Flesch Reading Ease(+) : 42.58 68.3\n", + "Flesch Kincaid Grade(-) : 16.5 8.7\n", + "Coleman Liau Index(-) : 11.38 9.23\n", + "Automated Readability Index(-) : 20.6 10.8\n", + "Gunning Fog(-) : 17.15 9.54\n", + "Smog Index(-) : 14.4 10.1\n", + "Dale-Chall Readability Score(-) : 7.7 6.13\n", + "\n", + "---\n", + "\n", + "(+) : higher => better;\n", + "\t90-100 : Very Easy\n", + "\t80-90 : Easy\n", + "\t70-80 : Fairly Easy\n", + "\t60-70 : Standard\n", + "\t50-60 : Fairly Difficult\n", + "\t30-50 : Difficult\n", + "\t0-30 : Very Confusing\n", + "\n", + "(-) : lower => better;\n", + "This is a grade formula in that a score of 9.3 means that a ninth grader would be able to read the document.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "8YnSkH-pITma", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 34 + }, + "outputId": "017e0302-7ef3-4f61-be7f-f9fa9eda35f8" + }, + "source": [ + "print('Flesch Reading Ease(+) for the summarised article: ', flesch_reading_ease(summarized_text))" + ], + "execution_count": null, + "outputs": [ + { + "output_type": "display_data", + "data": { + "text/html": [ + "" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "Flesch Reading Ease(+) : 58.42\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "DnxGYqvbIX7n" + }, + "source": [ + "" + ], + "execution_count": null, + "outputs": [] + } + ] +} \ No newline at end of file diff --git a/Final model/Team_4/simple_wiki.txt b/Final model/Team_4/simple_wiki.txt new file mode 
100644 index 0000000..5580f0b --- /dev/null +++ b/Final model/Team_4/simple_wiki.txt @@ -0,0 +1,291 @@ +Lionel Andrés "Leo" Messi Cuccittini (Spanish pronunciation: [ljoˈnel anˈdɾes ˈmesi]) (born 24 June 1987) is an Argentine footballer.[5] He plays for the Argentina national team as a forward. He is currently a free agent, as his contract at FC Barcelona expired. He also has Spanish citizenship, meaning he can play for Spain. Many experts and critics consider Messi as one of the greatest football players of all time. Some[who?] even say he is the greatest player ever. Messi has six Ballon d'Or awards, the most of any player, and one more than Cristiano Ronaldo.[6][7] His playing style and skills are very similar to the Argentine legend Diego Maradona because of their short height. There is much competition between him and Portuguese footballer Cristiano Ronaldo because of their similar skill levels. +Messi was born and raised in Rosario, Argentina. Messi is the third of four children of Jorge Messi and his wife Celia Cuccittini. He has two brothers named Rodrigo and Matias and two cousins named Maximiliano and Emanuel. + +Messi's grandmother died when he was 11, and after that he generally celebrates his goals by pointing up in the sky in tribute of his grandmother. + +Messi began football at a young age and his potential was quickly seen by Barcelona. He left Newell's Old Boys' youth team in September 2000 and moved with his family to Europe in February 2001. He officially signed with Barcelona in December 2000 on a napkin. They moved to Spain because Barcelona offered their help to treat his growth hormone deficiency, and Newell's didn't offer any help.[8] + +Messi played his first professional match at 16 years old on 16 November 2003 against Porto. He played the 2003-04 season with Barcelona B. He was promoted to the first team for the 2004-05 season, and made his league debut on 16 October 2004 against Espanyol as a substitute. He made his league debut at age 17, and became the youngest player to play for Barcelona's first team in an official competition. He scored his first professional goal on 1 May 2005 against Albacete from an assist by Ronaldinho. The goal made him the youngest-ever scorer for the club at that time. Barcelona won La Liga that season for the first time in 6 years, and won the league for a second time in a row along with the Spanish Super Cup and UEFA Champions League in 2006. + +His first breakthrough season was in the 2006–07 season; he became a first team regular by scoring his first hat-trick of his career in El Clásico. On 18 April 2007, he scored a goal similar to Maradona's "Goal of the Century" against England in the 1986 World Cup, where Maradona got the ball behind the halfway line on the right side and beat 4 defenders and the goalie to score. Messi's goal was similar to this; he received a pass from Xavi on the right side behind half-field, and then megged an opponent and 4 others including the goalie before finishing off with his right foot inside the penalty box.[9] In 2019, Barcelona fans voted it as the best goal in the club's history, receiving 45% of votes.[10] After Ronaldinho left the club at the end of the 2007-08 season, Messi was handed the number 10 shirt. Maybe his most successful season was the 2008–09 season. In this season, Messi scored 38 goals, including one in the Champions League final against Manchester United that Barcelona won 2–0 as part of their treble. In the following 2009–10 season, Messi scored 47 goals in all games. 
That equals to Ronaldo's (Brazilian) record total for Barcelona. Messi also won his first Ballon d'Or in December 2009, and also won his second a year later. He scored again in the 2011 Champions League final against their same opponent two years earlier, Manchester United. Barcelona won 3-1. Messi won his third Ballon d'Or in a row that year. + +Messi played his first game with the senior team on 17 August 2005 against Hungary. He came on as a substitute. However, he was shown a red card two minutes later for hitting an opponent in the face on accident while Messi was trying to pull the opponent off from his shirt. He was very disappointed by this. Messi scored his first goal for Argentina in a friendly against Croatia on 1 March 2006. Argentina lost the game 3-2.[11] He was selected for the 2006 World Cup by coach José Pekerman. In Argentina's second game against Serbia, he became their youngest player to play at a World Cup. He also assisted a goal scored the last one in the 6–0 victory. That goal made him the youngest goalscorer in the 2006 World Cup. + +Messi won the 2008 Olympics with Argentina as well, as he assisted Ángel Di Maria's goal in the final against Nigeria in Beijing. Messi was made captain of the national team in 2011. He scored his first hat-trick with the country in his 68th appearance during a 3-1 victory against Switzerland in February 2012.[12] + +On 21 June 2016, he broke the top scoring record for Argentina by scoring his 55th goal with a free-kick against the United States in the Copa América Centenario. The record was held by Gabriel Batistuta with 54 goals. +On 26 June, Argentina once again lost to Chile on penalties 4-2 in the Copa América Centenario final after a 0–0 tie. Messi missed his penalty in the penalty shootout. This was Messi's third consecutive defeat in a major tournament final with Argentina, and his fourth overall. A day after the match, Messi announced his retirement from international football. In an emotional interview after the game, he said he was very sad that he missed the penalty and also sad that he wasn't able to win any trophies in four finals.[13][14] + +Return from Retirement +He came out of retirement in October 2016 for the 2018 World Cup qualifiers. + +After a slow start to the 2018 FIFA World Cup, he helped Argentina qualify to the next round of the competition by scoring the first goal in the 2-1 win over Nigeria. + +Personal life +Relationships +Messi has three children with his wife Antonella Roccuzzo. They are named Thiago Messi, Mateo Messi and Ciro Messi. Thiago was born on 2 November 2012, Mateo was born on 11 September 2015 and Ciro was born on 10 March 2018. On the bottom of Messi's cleats is the name of his first son, Thiago. + +Messi and Antonella married in June 2017.[15] + +Charity +When not playing on the field, Messi finds time to do charity work. He started the Leo Messi Foundation in 2007 which helps children in at-risk situations. Messi also opened a kids park for children living in a hospital in Barcelona. + +Tax Issues +In July 2016 he was convicted of tax fraud and sentenced to spend 21 months in prison. Messi was also fined two million Euros.[16] Messi never went to prison, and the problem can be solved with probation. He told the court he "only played football" and didn't know anything because he left his money problems to be dealt with by his father, Jorge Messi. + +Ronaldo (born 18 September 1976) is a Brazilian football player.He has played for Brazil national team. 
Ronaldo started his career at Cruzeiro and moved to PSV in 1994. He joined Barcelona in 1996 + +Chernobyl or Chornobyl (Ukrainian: Чорнобиль) is a city in northern Ukraine, near the border with Belarus. + +It was a major communications node and important center of trade and commerce, especially in the 19th century. The city is 14.5 kilometers (9 miles) south by south-east of the Chornobyl nuclear power plant, which became known for the Chornobyl disaster. The plant exploded on 26 April 1986; clouds of radioactive particles were released, and the severely damaged containment vessel started leaking radioactive matter. More than 100,000 people were evacuated from the city and other affected areas. Despite the fact that radiation is still being emitted from the nuclear disaster site, the 800-year-old city of Chornobyl survives. As of 2004, government workers still police the zone. They try to clean up radioactive material. Hundreds of people — mostly the elderly — have decided to live with the dangers and have returned to their homes in the zone's towns and villages. Their population was highest in 1987, when there were more than 1200 people. In 2003, there were about 300. There are currently 704 people living in Chornobyl, the houses that have people inside have a sign on the outside saying, Owner of this house lives here. + +Artificial intelligence (AI) [1]is the ability of a computer program or a machine to think and learn. It is also a field of study which tries to make computers "smart". They work on their own without being encoded with commands. John McCarthy came up with the name "Artificial Intelligence" in 1955. + +In general use, the term "artificial intelligence[1]" means a programme which mimics human cognition. At least some of the things we associate with other minds, such as learning and problem solving can be done by computers, though not in the same way as we do.[2] Andreas Kaplan and Michael Haenlein define AI as a system’s ability to correctly interpret external data, to learn from such data, and to use those learnings to achieve specific goals and tasks through flexible adaptation.[3] + +An ideal (perfect) intelligent machine is a flexible agent which perceives its environment and takes actions to maximize its chance of success at some goal or objective.[4] As machines become increasingly capable, mental faculties once thought to require intelligence are removed from the definition. For example, optical character recognition is no longer perceived as an example of "artificial intelligence": it is just a routine technology. + +At present we use the term AI for successfully understanding human speech,[2] competing at a high level in strategic game systems (such as Chess and Go), self-driving cars, and interpreting complex data.[5] Some people also consider AI a danger to humanity if it continues to progress at its current pace.[6] + +An extreme goal of AI research is to create computer programs that can learn, solve problems, and think logically.[7][8] In practice, however, most applications have picked on problems which computers can do well. Searching data bases and doing calculations are things computers do better than people. On the other hand, "perceiving its environment" in any real sense is way beyond present-day computing. + +AI involves many different fields like computer science, mathematics, linguistics, psychology, neuroscience, and philosophy. 
Eventually researchers hope to create a "general artificial intelligence" which can solve many problems instead of focusing on just one. Researchers are also trying to create creative and emotional AI which can possibly empathize or create art. Many approaches and tools have been tried. + +Borrowing from the management literature, Kaplan and Haenlein classify artificial intelligence into three different types of AI systems: analytical, human-inspired, and humanized artificial intelligence.[3] Analytical AI has only characteristics consistent with cognitive intelligence generating cognitive representation of the world and using learning based on past experience to inform future decisions. Human-inspired AI has elements from cognitive as well as emotional intelligence, understanding, in addition to cognitive elements, also human emotions considering them in their decision making. Humanized AI shows characteristics of all types of competencies (i.e., cognitive, emotional, and social intelligence), able to be self-conscious and self-aware in interactions with others. + +History +Humanoid robots were built by Yan Shi, Hero of Alexandria, and Al-Jazari. Sentient machines became popular in fiction during the 19th and 20th centuries with the stories of Frankenstein and Rossum's Universal Robots. + +Formal logic was developed by ancient Greek philosophers and mathematicians. This study of logic produced the idea of a computer in the 19th and 20th centuries. Mathematician Alan Turing's theory of computation said that any mathematical problem could be solved by processing 1's and 0's. Advances in neurology, information theory, and cybernetics convinced a small group of researchers that an electronic brain was possible. + +AI research really started with a conference at Dartmouth College in 1956. It was a month-long brainstorming session attended by many people with interests in AI. At the conference they wrote programs that were amazing at the time, beating people at checkers or solving word problems. The Department of Defense started giving a lot of money to AI research and labs were created all over the world. + +Unfortunately, researchers really underestimated just how hard some problems were. The tools they had used still did not give computers things like emotions or common sense. Mathematician James Lighthill wrote a report on AI saying that "in no part of the field have discoveries made so far produced the major impact that was then promised".[9] The U.S and British governments wanted to fund more productive projects. Funding for AI research was cut, starting an "AI winter" where little research was done. + +AI research revived in the 1980s because of the popularity of expert systems, which simulated the knowledge of a human expert. By 1985, 1 billion dollars were spent on AI. New, faster computers convinced U.S. and British governments to start funding AI research again. However, the market for Lisp machines collapsed in 1987, and funding was pulled again, starting an even longer AI winter. + +AI revived again in the 90s and early 2000s with its use in data mining and medical diagnosis. This was possible because of faster computers and focusing on solving more specific problems. In 1997, Deep Blue became the first computer program to beat chess world champion Garry Kasparov. Faster computers, advances in deep learning, and access to more data have made AI popular throughout the world.[10] In 2011 IBM Watson beat the top two Jeopardy! 
players Brad Rutter and Ken Jennings, and in 2016 Google's AlphaGo beat top Go player Lee Sedol 4 out of 5 times. + +Hinduism is not only a religion but also a way of life.[note 1] Hinduism is known to have more than one god and is widely practiced in South Asia mainly in India and Nepal. Hinduism is the oldest religion in the world,[note 2] and Hindus refer to it as Sanātana Dharma, "the eternal tradition," or the "eternal way," beyond human history.[4][5] Scholars regard Hinduism as a combination[6] of different Indian cultures and traditions,[7] with diverse roots.[8][note 3] Hinduism has no founder and origins of Hinduism is unknown.[9]. Hinduism has roots in Indus Valley civilization.[10] There was no concept of religion in India and Hinduism was not a religion. Hinduism as a religion started to develop between 500 BCE and 300 CE,[11] after the Vedic period (1500 BCE to 500 BCE).[11][12] Hinduism contains a wide range of philosophies, and is linked by the concepts, like rituals, cosmology, Texts, and pilgrimage to sacred sites. Hindu texts are divided into Śruti ("heard") and Smṛti ("remembered"). These texts discuss philosophy, mythology, Vedic yajna, Yoga, agamic rituals, and temple building, and many more.[13] Major scriptures in Hinduism include the Vedas and Upanishads, the Bhagavad Gita, and the Agamas.[14][15][16] + +There are 4 goals or aims of human life, namely Dharma (duties), Artha (prosperity), Kama (desires/passions), Moksha (liberation/freedom/salvation);[17][18] karma (action, intent and consequences), Saṃsāra (cycle of rebirth), and the various Yogas (paths or practices to attain moksha).[15][19] Hindu rituals include puja (worship) and recitations, meditation, family-oriented rites of passage, annual festivals, and occasional pilgrimages. Some Hindus leave their social world and become sanyasi to achieve Moksha.[20] Hinduism prescribes the eternal duties, such as honesty, non-violence (ahimsa), patience, self-restraint, and compassion, among others.[21][22] The four largest sects of Hinduism are the Vaishnavism, Shaivism, Shaktism and Smartism.[23] + +Hinduism is the world's third largest religion, There are approximately 1.15 billion Hindus which are 15-16% of the global population.[web 1][24] The vast majority of Hindus live in India, Nepal and Mauritius. Hindus are also found in other countries. + +Etymology +The word Hindu is taken from the Indo-Aryan[27]/Sanskrit[28] word Sapta Sindhu, which is Sanskrit name for the Indus River which lies west of the border of India and Pakistan.[28][note 4] According to Gavin Flood, The word Hindu was used by Persians for the people who live beyond the Indus River,[28] Inscription of Darius I which was written around 550–486 BCE also refer Hindu as the people who live beyond the Indus River.[29] These records didn't refer Hindu as a religion.[28] The earliest record which refer Hindu as religion may be the 7th-century CE Chinese text Record of the Western Regions by Xuanzang,[29] and 14th-century Persian text Futuhu's-salatin by 'Abd al-Malik Isami.[note 5] This is because the name if the religion is called Hinduism, not Hindu. + +The Arabic term al-Hind referred to the people who live across the River Indus.[37] It means "Land of Hindus" which is what it stood for until Muslims started to come into India. + +The term Hindu was later used in some Sanskrit texts such as the later Rajataranginis of Kashmir (Hinduka, c. 
1450) and some 16th- to 18th-century Bengali Gaudiya Vaishnava texts including Chaitanya Charitamrita and Chaitanya Bhagavata. These texts used to distinguish Hindus from Muslims who are called Yavanas (foreigners) or Mlecchas (barbarians), with the 16th-century Chaitanya Charitamrita text and the 17th century Bhakta Mala text using the phrase "Hindu dharma".[38] In the end of the 18th century the European merchants and colonists began to call followers of Indian religions collectively as Hindus. The term Hinduism, then spelled Hindooism, was introduced into the English language in the 18th-century to denote the religious, philosophical, and cultural traditions native to India.[39] + +Definitions +Hinduism is diverse on ideas on spirituality and traditions, but has no ecclesiastical order, no unquestionable religious authorities, no governing body, no prophet(s) nor any binding holy book; Hindus can choose to be polytheistic, pantheistic, monotheistic, monistic, agnostic, atheistic or humanist.[40][41][42] Because of the wideness and openness of Hinduism, arriving at a definition is difficult.[28] [43] Hinduism has been defined as a religion, a religious tradition, a set of religious beliefs, and "a way of life."[44][note 1] From a Western point of view, Hinduism like other faiths is referred to as a religion. In India the term dharma is preferred, which is broader than the western term religion. + +The study of India and its cultures and religions, and the definition of "Hinduism", has been shaped by the interests of colonialism and by Western notions of religion.[45] Since the 1990s, those influences and its outcomes have been the topic of debate among scholars of Hinduism,[46][note 6] and have also been taken over by critics of the Western view on India.[47][note 7] + +Beliefs + +Temple wall panel relief sculpture at the Hoysaleswara temple in Halebidu, representing the Trimurti: Brahma, Shiva and Vishnu. +Hindu beliefs include (but are not restricted to) Dharma (ethics/duties), Samsāra (the continuing cycle of birth, life, death and rebirth), Karma (Every action has a reaction), Moksha (liberation from samsara or liberation in this life), and the various Yogas (paths or practices).[19] + +Purusharthas (objectives of human life) +Main article: Purusharthas +See also: Initiation, Dharma, Artha, Kāma, and Mokṣa +Hindism have accepted four proper goals or aims of human life: Dharma, Artha, Kama and Moksha. These are known as the Puruṣārthas:[17][18] + +Dharma (righteousness, ethics) +Dharma is considered one of the most important goal of a human being in Hinduism.[48] Dharma is considered Important because it is dharma which makes running of Universe and life possible,[49] and includes duties, virtues and "right way of living".[50] Hindu Dharma includes the religious duties, moral rights and duties of each individual, as well as behaviors that enable social order, right conduct, and those that are virtuous.[50] The Brihadaranyaka Upanishad states it as: + +Nothing is higher than Dharma. The weak overcomes the stronger by Dharma, as over a king. Truly that Dharma is the Truth (Satya); Therefore, when a man speaks the Truth, they say, "He speaks the Dharma"; and if he speaks Dharma, they say, "He speaks the Truth!" For both are one. + +— Brihadaranyaka Upanishad, 1.4.xiv [51][52] +In the Mahabharata, Krishna says it is Dharma which is holding both this-worldly and other-worldly affairs. (Mbh 12.110.11). 
The word Sanātana means eternal, perennial, or forever; thus, Sanātana Dharma means that it is the dharma that has neither beginning nor end.[53] + +Artha (livelihood, wealth) +Artha is second goal of life in Hinduism which means pursuit of wealth for livelihood, and economic prosperity. It includes political life, diplomacy and material well-being. The Artha includes all "means of life", activities and resources that enables one to be in a state one wants to be in, wealth, career and financial security.[54] The aim of artha is considered an important aim of human life in Hinduism.[55][56] + +Kāma (sensual pleasure) +Main article: Kama +Kāma (Sanskrit, Pali; Devanagari: काम) means desire, wish, passion, pleasure of the senses, the enjoyment of life, affection, or love, with or without sexual connotations.[57][58] In Hinduism, Kama is considered an important and healthy goal of human life when pursued without sacrificing Dharma, Artha and Moksha.[59] + +Mokṣa (liberation, freedom from samsara) +Main article: Moksha +Moksha (Sanskrit: मोक्ष mokṣa) or mukti (Sanskrit: मुक्ति) is the ultimate, most important goal in Hinduism. In one school Moksha means liberation from sorrow, suffering and saṃsāra (birth-rebirth cycle).[60][61] In other schools of Hinduism, such as monistic, moksha means self-realization,"realizing the whole universe as the Self".[62][63] + +Karma and samsara +Main article: Karma +Karma means action, work, or deed,[64] and also the vedic theory of cause and effect".[65][66] The theory is a combination of (1) causality that may be moral or non-moral; (2) moralization, that is good or bad actions have consequences; and (3) rebirth.[67] Karma theory means ''Whatever experience currently a man have is due to his/her past work''. These actions may be in a person's current life, or, in some schools of Hinduism, actions in their past lives.[67][68] This cycle of birth, life, death and rebirth is called samsara. Liberation from samsara through moksha is believed to ensure lasting happiness and peace.[69][70] Hindu scriptures teach that the future depends on the current action and our past deeds. + +Moksha +The ultimate goal of life,according to Hinduism is moksha, nirvana or samadhi, but is understood in different ways in different schools.For example, Advaita Vedanta says that after attaining moksha a person knows their "soul, self" and identifies it as one with Brahman (Ultimate reality or cause of everything).[71][72] The followers of Dvaita (dualistic) schools,state that after attaining moksha a person identify "soul, self" different from Brahman but very close to Brahman, and after attaining moksha one will spend eternity in a loka (higher planes). According to theistic schools of Hinduism, moksha is liberation from samsara, while for other schools such as the monistic school, moksha is possible in current life and is a psychological concept. + +Concept of God +Main articles: Ishvara and God in Hinduism +Hinduism is diverse and Hinduism include monotheism, polytheism, panentheism, pantheism, pandeism, monism, and atheism among others;[40][73][web 2] Basically it depends on individuals choice and that's why sometimes Hinduism is referred to as henotheistic (i.e., involving devotion to a single god while accepting the existence of others), but any such term is an over generalization.[74] + +Gods and Goddesses in Hinduism +Shiva +Durga +Lakshmi +Vishnu +Hindus believe that all living creatures have a soul. This soul or true "self" of every living being is called the ātman. 
The soul is believed to be eternal.[75] According to the monistic/pantheistic (non-dualist) theologies of Hinduism (such as Advaita Vedanta school), this Atman is indistinct from Brahman.[76] The goal of life, according to the Advaita school, is to realise that one's soul is identical to supreme soul, that the supreme soul is present in everything and everyone, all life is interconnected and there is oneness in all life.[77][78][79] Dualistic schools (see Dvaita and Bhakti) sees Brahman as a Supreme Being separate from individual souls.[80] They worship the Supreme Being variously as Vishnu, Brahma, Shiva, or Shakti, depending upon the sect. God is called Ishvara, Bhagavan, Parameshwara, Devadu or Devi, and these terms have different meanings in different schools of Hinduism.[81][82][83] Devi is typically used when refereeing to a female goddess. + +[84][85][86] + +The Hindu scriptures refer to celestial entities called Devas (or devī in feminine form; devatā used synonymously for Deva in Hindi), which in English means demi-gods or heavenly beings.[note 8] The devas are an integral part of Hindu culture and are depicted in art, architecture and through icons, and stories about them are related in the scriptures, particularly in Indian epic poetry and the Puranas. They are, however, often distinguished from Ishvara, a personal god, with many Hindus worshipping Ishvara in one of its particular manifestations as their iṣṭa devatā, or chosen ideal.[87][88] The choice is a matter of individual preference,[89] and of regional and family traditions.[89][note 9] The multitude of Devas are considered as manifestations of Brahman.[note 10] + +Main traditions +Main article: Hindu denominations + +A Ganesha-centric Panchayatana ("five deities", from the Smarta tradition): Ganesha (centre) with Shiva (top left), Devi (top right), Vishnu (bottom left) and Surya (bottom right). All these deities also have separate sects dedicated to them. +Hinduism has no central doctrinal authority and Hindus do not claim to belong to any particular sect or tradition.[91] Four major sects in Hinduism are: Vaishnavism, Shaivism, Shaktism and Smarthism.[92][93] + +Vaishnavism is the tradition that worships Vishnu[94] and his avatars, such as Krishna and Rama.[95] The people of this sect are generally non-ascetic, monastic.[96] These practices include community dancing, singing of Kirtans and Bhajans, with sound and music believed by some to have meditative and spiritual powers.[97] + +Shaivism is the tradition that focuses on Shiva. Shaivas are more attracted to ascetic individualism, and it has several sub-schools.[96] Their practices include Bhakti-style devotion but they leaned to philosply such as Advaita and Yoga.[92][97] Some Shaivas worship in temples, but some practice yoga, striving to be one with Shiva within.[98] Shaivas visualize god as half male, half female, as a combination of the male and female principles (Ardhanarishvara). Shaivism is related to Shaktism, wherein Shakti is seen as wife of Shiva.[92] Shaivism is mainly practiced in the Himalayan north from Kashmir to Nepal, and in south India.[99] + +Shaktism focuses on goddess worship of Shakti or Devi as cosmic mother,[96] and it is mainly worshipped in northeastern and eastern states of India such as Assam and Bengal. 
Devi is depicted as in gentler forms like Parvati, the consort of Shiva; or, as warrior goddesses like Kali and Durga.[100] Community celebrations include festivals, some of which include processions and idol immersion into sea or other water bodies.[101] + +Smartism worship all the major Hindu deities like Shiva, Vishnu, Shakti, Ganesha, Surya and Skanda.[102] The Smarta tradition developed during the (early) Classical Period of Hinduism around the beginning of the Common Era, when Hinduism emerged from the interaction between Brahmanism and local traditions.[103][104] The Smarta tradition is very much same as Advaita Vedanta, and consider Adi Shankara as its founder or reformer, who considered worship of God-with-attributes (saguna Brahman) as a journey towards ultimately realizing God-without-attributes (nirguna Brahman, Atman, Self-knowledge).[105][106] + +Hindu texts +Hindu text are world's oldest and had been written in Sanskrit and Tamil. The oldest Text is Rig Veda which is about 4000 years old.Hindu Texts can be divided in two parts: + +Shruti (what is heard) +Smriti (what is remembered) +Shruti +Shruti or Shruthi (Sanskrit: श्रुति; IAST: Śruti; IPA/Sanskrit: [ʃrut̪i]) in Sanskrit means "that which is heard" These ancient religious texts comprising the central canon of Hinduism includes the four Vedas including its four types of attached texts - the Samhitas, the Brahmanas, the Aranyakas and the early Upanishads + +Smriti +Smriti (Sanskrit: स्मृति, IAST: Smṛti), means "that which is remembered" are a body of Hindu texts. Smriti were the texts which were remembered and were spread through mouth from generation to generation. Smriti includes (the Mahābhārata and Rāmāyana), the Dharmasūtras and Dharmaśāstras (or Smritiśāstras), the Arthasaśāstras, the Purānas, the Kāvya or poetical literature. + +Festivals +There are many Hindu Festivals celebrated throughout the world but mainly in India and Nepal. These festivals include worship, offerings to deities, fasting, rituals, fairs, charity, celebrations, Puja, etc. The festivals mainly celebrate events from Hindu mythology, changes in season, changes in Solar System. Different sects celebrate different festivals but festivals like Diwali, Holi, Shivratri, Raksha Bandhan, Janamashtmi etc. are celebrated by the majority of Hindus. + +History +Periodisation +Hinduism can be divided in following ages + +Prevedic religions (pre-history and Indus Valley Civilisation; until c. 1500 BCE); +Vedic period (c. 1500–500 BCE); +"Second Urbanisation" (c. 500–200 BCE); +Classical Hinduism (c. 200 BCE-1100 CE);[note 20] +Pre-classical Hinduism (c. 200 BCE-300 CE); +"Golden Age" (Gupta Empire) (c. 320–650 CE); +Late-Classical Hinduism - Puranic Hinduism (c. 650–1100 CE); +Islam and sects of Hinduism (c. 1200–1700 CE); +Modern Hinduism (from c. 1800). +Origins + +The Pashupati seal, Indus Valley civilization +The origins of Hinduism are unknown but the earliest traces of Hinduism come from Mesolithic in the sites such as the rock paintings of Bhimbetka rock shelters dating to a period of 30,000 BCE or older,[note 11] as well as neolithic times.[note 12] Some of the religious practices can be considered to have originated in 4000 BCE. Several tribal religions still exist, though their practices may not resemble those of prehistoric religions.[web 3] + +Varna +According to one view, the Varna, which later transformed into caste system during the British rule, shows how strongly many have felt about each person following his or her dharma, or destined path. 
Many Hindus say it goes against the true meaning of dharma. However, Varna plays a big role in Hindu society. It's later transformation as Caste system by the British rule of India lost favor and became illegal after the independence of India. + +Temples +Puja (worship) takes place in the Mandir (temple). Mandirs vary in size from small village shrines to large buildings, surrounded by walls. People can also visit the Mandir at any time to pray and participate in the bhajans (religious songs). Hindus also worship at home and often have a special room with a shrine to particular gods. + +Temple construction in India started nearly 2000 years ago. The oldest temples that were built of brick and wood no longer exist. Stone later became the preferred material. Temples marked the transition of Hinduism from the Vedic religion of ritual sacrifices to a religion of Bhakti or love and devotion to a personal deity. Temple construction and mode of worship is governed by ancient Sanskrit scriptures called agamas, of which there are several, which deal with individual deities. There are substantial differences in architecture, customs, rituals and traditions in temples in different parts of India. During the ritual consecration of a temple, the presence of the universal all-encompassing Brahman is invoked into the main stone deity of the temple, through ritual, thereby making the deity and the temple sacred and divine + +Alternative cultures of worship +The Bhakti schools +The Bhakti (Devotional) school takes its name from the Hindu term that signifies a blissful, selfless and overwhelming love of God as the beloved Father, Mother, Child, or whatever relationship finds appeal in the devotee's heart. The philosophy of Bhakti seeks to tap into the universal divinity through personal form, which explains the proliferation of so many gods and goddesses in India, often reflecting the singular inclinations of small regions or groups of people. Seen as a form of Yoga, or union, it seeks to dissolve the ego in God, since consciousness of the body and limited mind as self is seen to be a divisive factor in spiritual realization. Essentially, it is God who effects all change, who is the source of all works, who acts through the devotee as love and light. 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. The Bhakti movements rejuvenated Hinduism through their intense expression of faith and their responsiveness to the emotional and philosophical needs of India. They can rightly be said to have affected the greatest wave of change in Hindu prayer and ritual since ancient times. + +The most popular means of expressing love for God in the Hindu tradition has been through puja, or ritual devotion, frequently using the aid of a murti (statue) in conjunction with the singing or chanting of meditational prayer in the form of mantras. + +Devotional songs called bhajans (written primarily from the 14th-17th centuries), kirtan (praise), and arti (a filtered down form of Vedic fire ritual) are sometimes sung in conjunction with performance of puja. This rather organic system of devotion attempts to aid the individual in connecting with God through symbolic medium. It is said, however, that the bhakta, through a growing connection with God, is eventually able to avoid all external form and is immersed entirely in the bliss of undifferentiated Love in Truth. 
+ +Altogether, bhakti resulted in a mass of devotional literature, music and art that has enriched the world and gave India renewed spiritual impetus, one eschewing unnecessary ritual and artificial social boundaries. See bhakti yoga for more. + +Tantrism +Main article: Tantra +According to the most famous Western Tantrik scholar, Sir John Woodroffe (pseudonym Arthur Avalon): "The Indian Tantras, which are numerous, constitute the Scripture (Shastra) of the Kaliyuga, and as such are the voluminous source of present and practical orthodox 'Hinduism'. The Tantra Shastra is, in fact, and whatever be its historical origin, a development of the Vaidika Karmakanda, promulgated to meet the needs of that age. Shiva says: 'For the benefit of men of the Kali age, men bereft of energy and dependent for existence on the food they eat, the Kaula doctrine, O auspicious one! is given' (Chap. IX., verse 12). To the Tantra we must therefore look if we would understand aright both ritual, yoga, and sadhana of all kinds, as also the general principles of which these practices are but the objective expression." (Introduction to Sir John Woodroffe's translation of "Mahanirvana Tantra.") + +The word "tantra" means "treatise" or "continuum", and is applied to a variety of mystical, occult, medical and scientific works as well as to those which we would now regard as "tantric". Most tantras were written in the late Middle Ages and sprang from Hindu cosmology and Yoga. + +Important symbolism and themes in Hinduism +Ahimsa and the cow +Many Hindus are vegetarians (do not eat meat) because of their respect for life. About 30% of today's Hindu population, especially in orthodox communities in South India, in certain northerly states like Gujarat, and in many Brahmin areas around the subcontinent, are vegetarian. + +Most Hindus who do eat meat do not eat beef. Some do not even use leather products. This is most likely because many Hindus have relied so heavily on the cow for all sorts of dairy products, tilling of fields and fuel for fertiliser that its status as a willing 'caretaker' of humanity grew to identifying it as an almost motherly figure. Thus, while most Hindus do not worship the cow, and rules against eating beef arose long after the Vedas had been written, it still has an honored place in Hindu society. It is said that Krishna is both Govinda (herder of cows) and Gopala (protector of cows), and Shiva's attendant is Nandi, the bull. With the stress on vegetarianism (which is usually followed even by meat-eating Hindus on religious days or special occasions) and the sacred nature of the cow, it is no wonder that most holy cities and areas in India have a ban on selling meat-products and there is a movement among Hindus to ban cow-slaughter not only in specific regions, but in all of India. + +Hindu symbols +Hindus use many symbols and signs. The two most important symbols used by Hindus are the "Aum" and the "Swastika (Hinduism)". + +Forms of worship: murtis and mantras +Contrary to popular belief, practiced Hinduism is neither polytheistic nor strictly monotheistic. The various Hindu gods and avatars that are worshipped by Hindus are understood as different forms of One truth, sometimes seen as beyond a mere god and as a formless Divine Ground (Brahman), akin but not limited to monism, or as one monotheistic principle like Vishnu or Shiva. 
+ +Whether believing in the One source as formless (nirguna brahman, without attributes) or as a personal god (saguna Brahman, with attributes), Hindus understand that the one truth may be seen as different to different people. Hinduism encourages devotees to describe and develop a personal relationship with their chosen deity (ishta devata) in the form of a god or goddess. + +While some censuses hold worshippers of one form or another of Vishnu (known as Vaishnavs) to be at 80% and those of Shiva (called Shaivaites) and Shakti at the remaining 20%, such figures are perhaps misleading. The vast majority of Hindus worship many gods as varicolored forms of the same prism of Truth. Among the most popular are Vishnu (as Krishna or Rama), Shiva, Devi (the Mother as many female deities, such as Lakshmi, Saraswati, Kali and Durga), Ganesha, Skanda and Hanuman. + +Worship of the said deities is often done through the aid of pictures or icons (murti) which are said not to be God themselves but conduits for the devotee's consciousness, markers for the human soul that signify the ineffable and illimitable nature of the love and grandeur of God. They are symbols of the greater principle, representing and are never presumed to be the concept or entity itself. Thus, Hindu image worship is a form of iconolatry, in which the symbols are venerated as putative sigils of divinity, as opposed to idolatry, a charge often levied (erroneously) at Hindus. For more details on this form of worship, see murti. + +Mantra +Hindus use several prayers and group of words. Some group of words are called mantras. These words are said to give the speaker a deeper concentration and understanding, thus coming closer to Brahman. A well known mantra is om or aum. It symbolizes Brahman, and is often the opening word in many prayers. To pronounce a mantra well, you should say it slowly, and in a deep voice. + +Geographic distribution +The nations of India, Mauritius, and Nepal as well as the Indonesian island of Bali have more people who are Hindus than people who are not Hindus.In these nations, specially Nepal and India Hinduism is very popular. These countries also have many Hindus: + +Bangladesh (12 million), +Sri Lanka (2.5 million), +the United States (2.0 million) +Pakistan (3.3 million), +South Africa (1.2 million), +the United Kingdom (1.2 million), +Malaysia (1.1 million), +Canada (0.7 million), +Fiji (0.5 million), +Trinidad and Tobago (0.5 million), +Guyana (0.4 million), +the Netherlands (0.4 million), +Singapore (0.3 million) +Myanmar (0.3 million), +Suriname (0.2 million), +Australia (0.1 Million). +There are also strong Hindu communities in the countries of the ex-Soviet Union, especially in Russia and Poland. The Indonesian islands of Java, Sulawesi, Sumatra, and Borneo also have big native Hindu populations. In its Yoga stream, Hinduism is even more widespread all over the world with 30 million (less than one percent can not be 30 million for US population) Hindus in the United States alone. + +== Introduction == +The Messi–Ronaldo rivalry is an association football rivalry between Argentine forward Lionel Messi and Portuguese forward Cristiano Ronaldo. Having won a combined eleven Ballon d'Or/FIFA Ballon d'Or awards (six for Messi and five for Ronaldo) and ten European Golden Shoe awards (six for Messi and four for Ronaldo), both are widely regarded as two of the greatest players of all time. 
They are two of the most decorated football players ever, having won a combined 66 trophies (Ronaldo 32, Messi 34) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season. They are amongst the 51 players in the history of the sport to score over 500 career goals, with both having scored over 700 goals each in their careers for club and country. Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73).Journalists and pundits regularly argue the individual merits of both players in an attempt to establish who they believe is the best attacker in modern football or ever. Regardless of preference, football critics generally agree that both are the greatest players of their generation, outperforming their peers numerically, largely by a significant margin. Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide. It has been compared to past global sports rivalries such as the Magic Johnson-Larry Bird rivalry in basketball, the Muhammad Ali–Joe Frazier rivalry in boxing, the Roger Federer–Rafael Nadal rivalry and the Björn Borg–John McEnroe rivalry in tennis, and the Alain Prost–Ayrton Senna rivalry in Formula One motor racing. Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Ronaldo is often described as someone of temperamental character while Messi is considered to have a more reserved character.At club level, Messi and Ronaldo represented rivals FC Barcelona and Real Madrid CF for nearly a decade. The two players faced each other at least twice every season in the world's most popular regular-season club game, El Clásico (among the most viewed annual sporting events), from Ronaldo's arrival at Madrid in 2009 until his transfer to Italian club Juventus F.C. in 2018. Off the field, they are also the face of two rival sportswear manufacturers, Messi of Adidas and Ronaldo of Nike, which are also the kit suppliers of their national teams and the opposite for their clubs. The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars in combined income from salaries, bonuses and off-field earnings. In 2018, Messi topped Ronaldo on the Forbes' list of highest-paid sportspeople, earning $111 million, with Ronaldo next on the list at $108 million. + +--- + +== History == +In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or, an award rewarded to the player voted as the best in the world by an international panel of sports journalists; and the FIFA World Player of the Year, an award voted for by coaches and captains of international teams. In an interview that year, Messi was quoted as saying that "Cristiano Ronaldo is an extraordinary player and it would be brilliant to be in the same team as him."They first played in a game against each other when Manchester United were drawn to play Barcelona in the 2007–08 UEFA Champions League semi-finals and were immediately pitted as major rivals. Ronaldo missed a penalty in the first leg, but United eventually advanced to the final via a Paul Scholes goal. 
At the end of the year, Ronaldo was awarded the Ballon d'Or and vowed that he would win the award again.The 2009 UEFA Champions League Final was contested between Manchester United and Barcelona on 27 May 2009 at the Stadio Olimpico in Rome, Italy. The match, described as a "dream clash", was again hyped as the latest battle between the two, this time to settle who was the best player in the world; Ronaldo claimed he was the better of the two, while Messi's club-mate Xavi sided with him. Manchester United manager Alex Ferguson was more diplomatic, praising both players as being amongst the world's elite talents. Messi, playing in a central role he was unaccustomed to so he would avoid a direct battle with Manchester United left-back Patrice Evra, scored Barcelona's second in a 2–0 victory with a header in the 70th minute. Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol.From 2009 to 2018, the two played against each other at least twice per season during El Clásico matches but also met many other times competitions such as the Copa del Rey, the Supercopa de España, and, a two-legged Champions League semifinal in 2011. Messi won four consecutive Ballon d'Or awards (2009 to 2012), with a fifth coming in 2015, while Ronaldo had equalled Messi's total of five with wins in 2013, 2014, 2016, and 2017. In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. In total, Messi and Ronaldo reached the podium a record twelve times each. Messi won two Champions League while Ronaldo was playing for Real Madrid, with the latter securing four out of five titles between 2014 and 2018. Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes thrice each (2011, 2014 and 2015). From 2009 to 2018, Ronaldo was the Champions League top scorer on six occasions, with Messi achieving this feat four times (including in 2015 when the pair finished joint-top). + +--- + +== Relationship between Messi and Ronaldo == +In a 2015 interview, Ronaldo commented on the rivalry by saying: "I think we push each other sometimes in the competition, this is why the competition is so high." Alex Ferguson, Ronaldo's manager during his time at Manchester United, opined: "I don't think the rivalry against each other bothers them. I think they have their own personal pride in terms of wanting to be the best." Messi has denied any rivalry, and blames the media for creating it, stating that "only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano."It is widely argued and documented that there is an atmosphere of competition between the duo, with Guillem Balagué claiming in the book Ronaldo that he refers to his Argentine counterpart as a "motherfucker" behind his back, and Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play. 
In response to claims that he and Messi do not get on well on a personal level, Ronaldo commented: "We don't have a relationship outside the world of football, just as we don't with a lot of other players", before adding that in years to come he hopes they can laugh about it together, stating: "We have to look on this rivalry with a positive spirit, because it's a good thing." On 13 November 2014, Ronaldo also threatened to take legal action over the remarks made by Balagué. After Ronaldo's departure from Real Madrid to Juventus, Messi admitted to missing him, saying: "I miss Cristiano. Although it was a bit difficult to see him win trophies, he gave La Liga prestige." During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to "have dinner together in the future", to which Messi later replied: "If I get an invitation, why not?" + +--- + +== Awards and records == +Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as "incredible", "ridiculous", and "remarkable", respectively. The rivalry itself has been described as one about records and reputation of the players, rather than one based in loathing.Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, with comfortably over 1,000 totals; meanwhile, Ronaldo places second for goals scored and in third for assists provided, whilst being the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games. Messi also holds both the most goals in a season (73), and second most assists in a season (34), just one behind midfielder Juan Mata's record. He is the outright holder of most goal contributions in a season, with 107 in 2011–12. The two had broken each other's record over the course of 2015 after Messi surpassed the previous record holder Raúl in November 2014. Ronaldo opened a gap in the 2015–16 season when he became the first player to score double figures in the group stage of the Champions League, setting the record at 11 goals. They are the first two players to score 100 goals in UEFA Champions League history.They dominated the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; in 2018, their roughly decade-long triumph was ended by Luka Modrić, seen as "the end of an era." In an interview for the France Football, Modrić stated that "history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. Nobody has the right to compare themselves to them." Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four.Collectively, Ronaldo has won 32 major trophies including seven league titles and five Champions Leagues, and guided Portugal to their first trophies UEFA Euro 2016 and the 2019 UEFA Nations League. 
He has also won four national cups, two league cups, six national super cups, two European Super Cups, and four FIFA Club World Cups.Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. As for youth competitions, Messi won a gold medal at the 2008 Olympic Games held in China, and he had previously won the FIFA U-20 World Cup in 2005, being elected best player and top scorer of the tournament. Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named joint-best player and top scorer. + +--- + +== Head-to-head == +In past years, it had been said that the rivalry between Real Madrid and Barcelona has been "encapsulated" by the individual rivalry between Ronaldo and Messi, with there never being a goalless El Clásico between the two. + +== Introduction == + +The Messi-Ronaldo rivalry is a football rivalry between two very good football players: Cristiano Ronaldo and Lionel Messi. Many people think that both Messi and Ronaldo are the best players in the world today. Both have won the Ballon d'Or many times, six for Messi and five for Ronaldo, as well as many other awards, such as the European Golden Shoe. They are two of the most successful football players ever, having won a combined 66 trophies. Messi has won 34 trophies in his senior career, and Ronaldo has won 32 trophies in his senior career. They are the only two players in the history of the sport to score 500 or more goals in a single season. They have both scored over 700 goals in their careers. Ronaldo currently holds the world record for most official goals in a career, while Messi has the superior goals-to-games ratio (0.81, to Ronaldo's 0.73). Many people think that Ronaldo is the best player in the world today, while some people think that Messi is the best player ever. No matter what people think, football people generally agree that both are the greatest players of their time. Many people think they are the best players of their time. Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide. It has been compared to past global sports rivalries such as the Muhammad Ali-Joe Frazier rivalry in boxing, the Alain Prost-Ayrton Senna rivalry in Formula One motor racing, the Björn Borg-John McEnroe rivalry in tennis, and the Roger Federer-Rafael Nadal rivalry in tennis. Some commentators choose to analyse the differing physiques and playing styles of the two, while part of the debate revolves around the contrasting personalities of the two players; Messi is often described as a player with a strong personality, while Ronaldo is considered to have a more reserved character. The two players play against each other at least twice every season in the world's most popular regular-season club game, El Clásico. Since Ronaldo joined Madrid in 2009, they have played against each other at least twice every season. in 2018. Off the field, they are also the face of two rival sportswear manufacturers, Adidas and Nike. Adidas and Nike both supply the national teams of both Messi and Ronaldo. 
The two highest-paid players in football, Messi and Ronaldo are among the world's best-paid sports stars. They both make a lot of money from their jobs. In 2018, Messi made $111 million on the Forbes list of the world's most paid sportspeople. Ronaldo made $108 million on the list. + +== History == + +In 2007, Ronaldo and Messi finished as runners-up to A.C. Milan's Kaká in both the Ballon d'Or and the FIFA World Player of the Year. The Ballon d'Or is an award given to the best player in the world by an international group of sports journalists. In an interview that year, Messi said that he would like to play with Ronaldo. He said that "Cristiano Ronaldo is an extraordinary player. They first played against each other when Manchester United were playing Barcelona in the UEFA Champions League in 2007. They played against each other for the first time in this game. In the first leg, Ronaldo missed a penalty. In the end, Paul Scholes scored United's goal. At the end of the year, Ronaldo was awarded the Ballon d'Or and said that he would win the award again. The 2009 UEFA Champions League Final was played on May 27, 2009 at the Stadio Olimpico in Rome, Italy. The match, described as a "dream clash", was again hyped as the latest battle between the two, with Messi's club-mate Xavi saying that Ronaldo was the better player, while Ronaldo said that he was the best player in the world. Manchester United manager Alex Ferguson thought that both players were very good. He said that they were the best in the world. Messi, playing in a central role so he would not have to fight with Manchester United left-back Patrice Evra, scored Barcelona's second goal in a 2-0 victory. He scored the goal in the 70th minute. Meanwhile, Ronaldo was subdued for much of the game, despite some early chances to score, and his frustration eventually showed when he was booked for a rash tackle on Carles Puyol. The two have played against each other at least twice a season since the beginning of the 2009\/10 season, including a two-legged Champions League semifinal in 2011. Messi has won the Ballon d'Or four times in a row (2009 to 2012), while Ronaldo has won it five times. Ronaldo won the award in 2013, 2014, 2016, and 2017. In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times. While Ronaldo was playing for Real Madrid, Messi won two Champions League games. Real Madrid have won four out of five titles since 2014. During this time, Messi has won five Pichichi trophies and three European Golden Shoe awards. Ronaldo has won these awards three times, once each in 2011, 2014 and 2015. From 2009 to 2018, Ronaldo has scored more goals in the Champions League than Messi four times. In 2015, the two players finished as the top scorers together. + +== Relationship between Messi and Ronaldo == + +In a 2015 interview, Ronaldo talked about the rivalry. He said: "Sometimes we push each other a little bit, that's why the competition is so good. Alex Ferguson, who managed Ronaldo when he was at Manchester United, said that he does not care about the other players. I think they have their own personal pride. They want to be the best at it." Messi has denied any rivalry, and blames the media for creating it. He said that "only the media, the press, wants us to be at loggerheads (not playing against each other). 
"It is widely argued and documented that there is an atmosphere of competition between the duo, with Luca Caioli saying in his book Ronaldo: The Obsession for Perfection that, according to his sources, Ronaldo heats up when watching Messi play, and Guillem Balagué claiming in the book that he refers to his Argentine counterpart as a "motherfucker" behind his back. He has said that he and Messi do not get on well on a personal level, and that they do not have a good relationship outside of football. He said that they do not have a good relationship with a lot of other players, and that in years to come he hopes they can laugh about it together. On 13 November 2014, Ronaldo also said that he would take legal action against Balagué. After Ronaldo left Real Madrid to join Juventus, Messi said that he "missed" him, "I miss Cristiano". It was a little hard for him to win trophies, but he gave La Liga importance." During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to "have dinner together in the future", to which Messi said: "If I get an invitation, why not?" + +== Awards and records == + +Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken many goalscoring records for both club and country. People have said that they are both "incredible" and "remarkable" for having scored so many goals. Messi is the all-time La Liga top scorer, as well as having the most goal contributions (goals + assists) in the history of the top European leagues, while being the UEFA Champions League all-time top goalscorer and assist provider. Ronaldo is the UEFA Champions League all-time top goalscorer and assist provider, with Messi in second for both goals scored and assists provided in the competition's history, although once more having a superior ratio of goal contributions to games. Messi also holds the record for most goals in a season with 73. He also holds the record for most assists with 34. He has scored the most goals in a single season with 107 in the 2011 â "12 season. The two had broken each other's record over the course of 2015. In November 2014, Messi beat the previous record holder. In the 2015/16 season, he became the first player in history to score double figures in the Champions League group stage, setting the record at 11 goals. They are the first two players to score 100 goals in UEFA Champions League history. They have won the Ballon d'Or/The Best FIFA Men's Player awards since 2008, and UEFA Men's Player of the Year Award since 2014; Luka Modrić won the award for the first time in his career in 2018. In an interview for the France Football, Modrić said that "history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them." Since 2008, Messi has won six Ballons d'Or to Ronaldo's five, six FIFA World's Best Player awards to Ronaldo's five, and six European Golden Shoes to Ronaldo's four. In that time, Messi has won seven league titles and five Champions Leagues, while Ronaldo has won seven league titles and five Champions Leagues. Messi has won a club record 34 major trophies as a Barcelona player, including ten league titles, four Champions Leagues, seven Copa del Rey titles, seven Spanish Super Cups, three European Super Cups, and three FIFA Club World Cups. He has also won four national cups, two league cups, six national super cups, and four FIFA Club World Cups. 
In 2008, Messi won a gold medal at the Olympic Games held in China, and in 2005, he won the FIFA U-20 World Cup. Messi was the best player at the 2008 Olympic Games, and he won the FIFA U-20 World Cup. In addition, Messi was runner-up at three Copa Américas and at the 2014 World Cup. At the 2021 Copa América, Messi was named joint-best player and top scorer, and he won his first major international trophy. + +== Head-to-head == + +In past years, it had been said that the rivalry between Real Madrid and Barcelona is the best in the world. This is because Messi and Ronaldo have always played against each other in El Clásico. diff --git a/Final model/Team_4/train_muss.ipynb b/Final model/Team_4/train_muss.ipynb new file mode 100644 index 0000000..b16c017 --- /dev/null +++ b/Final model/Team_4/train_muss.ipynb @@ -0,0 +1,1697 @@ +{ + "nbformat": 4, + "nbformat_minor": 0, + "metadata": { + "colab": { + "name": "MUSS.ipynb", + "provenance": [], + "collapsed_sections": [ + "eDgjjNeBXulf", + "VVkUDfv0TDNB", + "oRYaxhueUpCV", + "h3ZtlqW0Uu7r" + ], + "authorship_tag": "ABX9TyPjNHiywYAO3h/Q9OVtiJo8", + "include_colab_link": true + }, + "kernelspec": { + "name": "python3", + "display_name": "Python 3" + }, + "language_info": { + "name": "python" + } + }, + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "view-in-github", + "colab_type": "text" + }, + "source": [ + "\"Open" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "orOMOT86WBro" + }, + "source": [ + "# Wikipedia Simplifier" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FF1-rHc2WKV3" + }, + "source": [ + "This is a simplified (xD) implementation of the MUSS model presented in the paper [MUSS: Multilingual Unsupervised Sentence Simplification by Mining Paraphrases](https://arxiv.org/pdf/2005.00352.pdf) using *Pytorch* and *transformers* library. I have implemented it for English language only. This is not the exact implementation as described in the paper but similar to it. \n", + "\n", + "Since it is nearly impossible to train this model using Colab or using just 1 GPU and limited storage, so I have used the pretrained model for evaluations. Though the functions can be tested invidually to know if they work the way they are intended to do. Please check the other notebook to see the model in action." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "lUSVo7VVpVhE" + }, + "source": [ + "###Approach" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "G9fNXCjOiVh5" + }, + "source": [ + "####**1. 
Mining Paraphrases**\n", + "\n", + ">**a) Sequence Extraction**:\n", + ">A sequence consists of multiple sentences: to allow sentence splitting or fusion operations.\n", + "\n", + "* get docs from HEAD split in CCNet (categorised by language)\n", + "* doc -> sentences using NLTK sentence tokenizer\n", + "* adjacent sentences -> sequences with max characters = 300\n", + "* filter sequences: remove those having >= 10% punctuation characters or low language model probability according to a 3-gram Kneser-Ney language model trained with kenlm on Wikipedia\n", + "\n", + "\n", + ">**b) Creating a Sequence Index using Embeddings**\n", + "\n", + "* extracted sequences (1 billion) -> 1024-dimensional embeddings using LASER (reduced to 512 by using PCA followed by random rotation)\n", + "\n", + "\n", + ">**c) Mining paraphrases**\n", + "\n", + "* Index these embeddings (for use with faiss)\n", + "* each sequence is used as a query (q_i) against these 1 billion sequences to find the top 8 nearest neighbours using L2 distance (faiss), keep those with L2 distance < 0.05 and relative distance to the other 7 neighbours < 0.6\n", + "* paraphrase filtering -
\n", + " - remove almost identical pp with character Levenshtein distance <= 20%\n", + " - remove pp coming from same document\n", + " - remove pp where one sqeuence is contained in other\n", + "\n", + "\n", + "####**2. Simplifying with ACCESS**\n", + "\n", + "* ACCESS is a method to make any seq2seq model controllable by conditioning on simplification-specific control tokens.\n", + "* Apply this to seq2seq pretrained transformer models based on the BART.\n", + "\n", + ">**Training with control tokens**\n", + "\n", + "- During train time, control tokens provided to model that give info about target sequence.\n", + "- During inference time, control the generation by selecting a given target control value.\n", + "- Prepend the following control tokens to every source in training set:\n", + "\t\t : Character Length Ratio\n", + "\t\t : replace-only Levenshtein similarity\n", + "\t\t : aggregated word frequency ratio\n", + "\t\t : dependency tree depth ratio\n", + "\n", + ">**Selecting Control values at Inference**\n", + "\n", + "- Shorter sentences are more adapted to people with cognitive disabilities, while using more frequent words are useful to second language learners.\n", + "- Choose these hyperparameters based on SARI score on validation set or by using prior knowledge based on target audience.\n", + "\n", + "\n", + "####**3. Leveraging Unsupervised Pretraining**\n", + "\n", + "* Fine tune the pretrained generative model BART on the newly created training corpora." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7eI3uQh0aKz9" + }, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eDgjjNeBXulf" + }, + "source": [ + "### Loading the data for mining paraphrases" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gCdgWwrClbSb" + }, + "source": [ + "**Get data from CCNet**" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "xHFOuBZggHJr", + "outputId": "c8d2f9fb-149c-49ed-9bec-1cb9b3cc8bf3" + }, + "source": [ + "!git clone https://github.com/facebookresearch/cc_net" + ], + "execution_count": 2, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Cloning into 'cc_net'...\n", + "remote: Enumerating objects: 471, done.\u001b[K\n", + "remote: Total 471 (delta 0), reused 0 (delta 0), pack-reused 471\u001b[K\n", + "Receiving objects: 100% (471/471), 169.97 KiB | 4.59 MiB/s, done.\n", + "Resolving deltas: 100% (329/329), done.\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "-01DRvtJgRCa", + "outputId": "f3d90067-730b-448d-b721-d65bd705cb86" + }, + "source": [ + "%cd cc_net/" + ], + "execution_count": 6, + "outputs": [ + { + "output_type": "stream", + "text": [ + "/content/cc_net\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "uy-YUQIQgND8" + }, + "source": [ + "!mkdir ./data/" + ], + "execution_count": 7, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "lGnwZzglgjpU" + }, + "source": [ + "!python -m pip install .[getpy]" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "irp46hVsfxdZ" + }, + "source": [ + "# !python -m cc_net --dump 2019-13\n", + "\n", + "# Note : this won't work here because it requires 7 TB storage :)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": 
"lmzTmKq1le5Y" + }, + "source": [ + "**Get sample data from wikipedia**" + ] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "hK0nxvZYleHY", + "outputId": "b297d88f-189d-4dc5-d261-9fdf0496869d" + }, + "source": [ + "# For testing purpose, we can provide some sample data\n", + "%pip install wikipedia" + ], + "execution_count": 1, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Collecting wikipedia\n", + " Downloading wikipedia-1.4.0.tar.gz (27 kB)\n", + "Requirement already satisfied: beautifulsoup4 in /usr/local/lib/python3.7/dist-packages (from wikipedia) (4.6.3)\n", + "Requirement already satisfied: requests<3.0.0,>=2.0.0 in /usr/local/lib/python3.7/dist-packages (from wikipedia) (2.23.0)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2.10)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (2021.5.30)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (3.0.4)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.0.0->wikipedia) (1.24.3)\n", + "Building wheels for collected packages: wikipedia\n", + " Building wheel for wikipedia (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for wikipedia: filename=wikipedia-1.4.0-py3-none-any.whl size=11696 sha256=6b368574f746b01ed9852ed3179de0aabed882e83b71ba7dd02d8d0818eb250e\n", + " Stored in directory: /root/.cache/pip/wheels/15/93/6d/5b2c68b8a64c7a7a04947b4ed6d89fb557dcc6bc27d1d7f3ba\n", + "Successfully built wikipedia\n", + "Installing collected packages: wikipedia\n", + "Successfully installed wikipedia-1.4.0\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "mNtWCLkmln_u" + }, + "source": [ + "import wikipedia" + ], + "execution_count": 1, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "AXU2spGTX3Ku" + }, + "source": [ + "# upload simple_wiki.txt file\n", + "\n", + "def get_data():\n", + " titles = [\"Messi\", \"Christiano Ronaldo\", 'messi-ronaldo rivalry', 'chernobyl', 'artificial intelligence', 'Hinduism']\n", + " docs = []\n", + " with open('simple_wiki.txt') as fo:\n", + " data = fo.read()\n", + " docs.append(data)\n", + " for title in titles:\n", + " page = wikipedia.page(title)\n", + " docs.append(page.content)\n", + " return docs" + ], + "execution_count": 2, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "VVkUDfv0TDNB" + }, + "source": [ + "### Mining Paraphrases" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kGE16C89V8sE", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "6ff3e220-9429-4f77-d9de-b26c8644a044" + }, + "source": [ + "# Install required dependencies\n", + "%pip install laserembeddings python-Levenshtein faiss faiss-cpu" + ], + "execution_count": 27, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: laserembeddings in /usr/local/lib/python3.7/dist-packages (1.1.1)\n", + "Requirement already satisfied: python-Levenshtein in /usr/local/lib/python3.7/dist-packages (0.12.2)\n", + "Requirement already satisfied: faiss in /usr/local/lib/python3.7/dist-packages (1.5.3)\n", + "Requirement already satisfied: 
faiss-cpu in /usr/local/lib/python3.7/dist-packages (1.7.1.post2)\n", + "Requirement already satisfied: torch<2.0.0,>=1.0.1.post2 in /usr/local/lib/python3.7/dist-packages (from laserembeddings) (1.9.0+cu102)\n", + "Requirement already satisfied: numpy<2.0.0,>=1.15.4 in /usr/local/lib/python3.7/dist-packages (from laserembeddings) (1.19.5)\n", + "Requirement already satisfied: transliterate==1.10.2 in /usr/local/lib/python3.7/dist-packages (from laserembeddings) (1.10.2)\n", + "Requirement already satisfied: subword-nmt<0.4.0,>=0.3.6 in /usr/local/lib/python3.7/dist-packages (from laserembeddings) (0.3.7)\n", + "Requirement already satisfied: sacremoses==0.0.35 in /usr/local/lib/python3.7/dist-packages (from laserembeddings) (0.0.35)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from sacremoses==0.0.35->laserembeddings) (1.15.0)\n", + "Requirement already satisfied: tqdm in /usr/local/lib/python3.7/dist-packages (from sacremoses==0.0.35->laserembeddings) (4.41.1)\n", + "Requirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from sacremoses==0.0.35->laserembeddings) (1.0.1)\n", + "Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses==0.0.35->laserembeddings) (7.1.2)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from torch<2.0.0,>=1.0.1.post2->laserembeddings) (3.7.4.3)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from python-Levenshtein) (57.2.0)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "VT9PXDbXfu2p", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "7defbdae-900e-4c66-be15-4d649b797929" + }, + "source": [ + "# Import required libraries\n", + "import numpy as np\n", + "import faiss\n", + "\n", + "from string import punctuation\n", + "from nltk.tokenize import sent_tokenize\n", + "from laserembeddings import Laser\n", + "from sklearn.decomposition import PCA\n", + "from Levenshtein import distance as levenshtein_distance\n", + "\n", + "import nltk\n", + "nltk.download('punkt')" + ], + "execution_count": 28, + "outputs": [ + { + "output_type": "stream", + "text": [ + "[nltk_data] Downloading package punkt to /root/nltk_data...\n", + "[nltk_data] Package punkt is already up-to-date!\n" + ], + "name": "stdout" + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "True" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 28 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "jsWEF4izTPi9" + }, + "source": [ + "**Sequence Extraction**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pnW6Sovgf1es" + }, + "source": [ + "def doc_to_sentences(doc):\n", + " \"\"\"\n", + " Splits a document into sentences.\n", + " \"\"\"\n", + " doc = doc.replace('\\n', ' ').replace('\\t', ' ').replace('\\x00', ' ')\n", + " return sent_tokenize(doc)\n", + "\n", + "\n", + "def filter_sentences(sentences, punc_ratio=0.1, lang_model=None, lang_prob=0.5):\n", + " \"\"\"\n", + " Filters sentences by removing those which contain a high number of punctuation marks \n", + " or have low language model probability or are too small.\n", + " \"\"\"\n", + " filtered_sentences = []\n", + "\n", + " for seq in sentences:\n", + " if len(seq) < 30:\n", + " continue\n", + "\n", + " if lang_model is not None:\n", + " prob = lang_model.prob(seq)\n", + " if prob < lang_prob:\n", + " continue\n", + 
"\n", + " num_punc = sum(1 for c in seq if c in punctuation)\n", + " if num_punc / len(seq) < punc_ratio:\n", + " filtered_sentences.append(seq)\n", + "\n", + " return filtered_sentences\n", + "\n", + "\n", + "def generate_sequences(sentences, max_chars=300):\n", + " \"\"\"\n", + " Generates sequences of adjacent sentences from a list of sentences.\n", + " \"\"\"\n", + " sequences = []\n", + " total_sentences = len(sentences)\n", + "\n", + " for i in range(total_sentences):\n", + " cur_seq = sentences[i]\n", + " cur_chars = len(cur_seq)\n", + " if cur_chars > max_chars:\n", + " continue\n", + " \n", + " sequences.append(cur_seq)\n", + " for j in range(i+1, total_sentences):\n", + " cur_chars += len(sentences[j])\n", + " if cur_chars > max_chars:\n", + " break\n", + " \n", + " cur_seq += ' ' + sentences[j]\n", + " sequences.append(cur_seq)\n", + "\n", + " return sequences" + ], + "execution_count": 29, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "i56payAETsln" + }, + "source": [ + "**Creating a sequence index using embeddings**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "kfesZM9zT9e1", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "e673db62-7305-4ae8-8cd3-fbcc6a5d1483" + }, + "source": [ + "!python -m laserembeddings download-models" + ], + "execution_count": 30, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Downloading models into /usr/local/lib/python3.7/dist-packages/laserembeddings/data\n", + "\n", + "✅ Downloaded https://dl.fbaipublicfiles.com/laser/models/93langs.fcodes \n", + "✅ Downloaded https://dl.fbaipublicfiles.com/laser/models/93langs.fvocab \n", + "✅ Downloaded https://dl.fbaipublicfiles.com/laser/models/bilstm.93langs.2018-12-26.pt \n", + "\n", + "✨ You're all set!\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "cf49FUnMT2WN" + }, + "source": [ + "def compute_embeddings(sequences, dim=512):\n", + " \"\"\"\n", + " Computes the embeddings for a list of sequences.\n", + " \"\"\"\n", + " laser = Laser()\n", + " embeddings = laser.embed_sentences(sequences, lang='en')\n", + " # embeddings is a N*1024 (N = number of sentences) NumPy array\n", + "\n", + " pca = PCA(n_components=dim)\n", + " embeddings = pca.fit_transform(embeddings)\n", + " return embeddings" + ], + "execution_count": 31, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "dLWBcLoNTimw" + }, + "source": [ + "**Mining Paraphrases**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5QsHaWVUTlnf" + }, + "source": [ + "def index_embeddings(embeddings):\n", + " \"\"\"\n", + " Indexes a list of embeddings using a FAISS index.\n", + " \"\"\"\n", + " index = faiss.IndexFlatL2(embeddings.shape[1])\n", + " index.add(embeddings)\n", + " return index\n", + "\n", + "\n", + "def get_nearest_neighbors(index, embeddings, k=8):\n", + " \"\"\"\n", + " Returns the k nearest neighbors of each embedding in a list of embeddings.\n", + " \"\"\"\n", + " D, I = index.search(embeddings, k)\n", + " return D, I\n", + "\n", + "\n", + "def filter_nearest_neighbors(D, I, max_L2_dist=0.05):\n", + " \"\"\"\n", + " Filters the nearest neighbors to remove those which are too far from the queries.\n", + " \"\"\"\n", + " filtered_neighbors = np.ones(I.shape) * (-1)\n", + " for i in range(I.shape[0]):\n", + " for j in range(I.shape[1]):\n", + " if D[i,j] <= max_L2_dist:\n", + " filtered_neighbors[i,j] = I[i,j]\n", + " \n", + " filtered_neighbors = filtered_neighbors.astype(int)\n", + 
" return filtered_neighbors\n", + "\n", + "\n", + "def filter_paraphrases(I, sequences, min_l_dist=0.2):\n", + " \"\"\"\n", + " Removes almost identical pp with character level Levenshtein distance <= 20%\n", + "\tor pp from coming same document ** (need to implement this) **\n", + "\tor pp where one sequence is contained in other\n", + " \"\"\"\n", + " for i in range(I.shape[0]):\n", + " cur_seq = sequences[i]\n", + " for j in range(I.shape[1]):\n", + " if I[i,j] == -1:\n", + " continue\n", + " \n", + " target_seq = sequences[I[i,j]]\n", + " dist = levenshtein_distance(cur_seq, target_seq)\n", + " if dist <= min_l_dist:\n", + " I[i,j] = -1\n", + " continue\n", + " \n", + " if cur_seq in target_seq or target_seq in cur_seq:\n", + " I[i,j] = -1\n", + "\n", + " return I\n", + "\n", + "\n", + "def generate_aligned_paraphrases(I, sequences):\n", + " \"\"\"\n", + " Generates a list of paraphrases from the list of sequences and their nearest neighbors.\n", + " \"\"\"\n", + " paraphrases = []\n", + " for i in range(I.shape[0]):\n", + " cur_seq = sequences[i]\n", + " for j in range(I.shape[1]):\n", + " if I[i,j] == -1:\n", + " continue\n", + " \n", + " target_seq = sequences[I[i,j]]\n", + " paraphrases.append((cur_seq, target_seq))\n", + " \n", + " return paraphrases" + ], + "execution_count": 32, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "AzsTvRcmZr6u" + }, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "oRYaxhueUpCV" + }, + "source": [ + "### Simplifying with ACCESS" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "nnzg7qEUZfpu", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "989a9121-ea45-4c5a-f5a6-fceebab58c3b" + }, + "source": [ + "!pip install python-Levenshtein" + ], + "execution_count": 9, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: python-Levenshtein in /usr/local/lib/python3.7/dist-packages (0.12.2)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from python-Levenshtein) (57.2.0)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "1FfOGSIDiBLv", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "e39bace1-1a9e-4ec5-e069-631f874bea46" + }, + "source": [ + "!wget https://dl.fbaipublicfiles.com/fasttext/vectors-crawl/cc.en.300.vec.gz" + ], + "execution_count": 10, + "outputs": [ + { + "output_type": "stream", + "text": [ + "--2021-07-31 19:06:58-- https://dl.fbaipublicfiles.com/fasttext/vectors-crawl/cc.en.300.vec.gz\n", + "Resolving dl.fbaipublicfiles.com (dl.fbaipublicfiles.com)... 104.22.74.142, 104.22.75.142, 172.67.9.4, ...\n", + "Connecting to dl.fbaipublicfiles.com (dl.fbaipublicfiles.com)|104.22.74.142|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 1325960915 (1.2G) [binary/octet-stream]\n", + "Saving to: ‘cc.en.300.vec.gz.1’\n", + "\n", + "cc.en.300.vec.gz.1 100%[===================>] 1.23G 24.2MB/s in 53s \n", + "\n", + "2021-07-31 19:07:52 (23.8 MB/s) - ‘cc.en.300.vec.gz.1’ saved [1325960915/1325960915]\n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "1luRjujgh93o", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "7bcfb530-298a-4c1d-cde5-f30fe3b55259" + }, + "source": [ + "!python -m spacy download en_core_web_md" + ], + "execution_count": 11, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Collecting en_core_web_md==2.2.5\n", + " Downloading https://github.com/explosion/spacy-models/releases/download/en_core_web_md-2.2.5/en_core_web_md-2.2.5.tar.gz (96.4 MB)\n", + "\u001b[K |████████████████████████████████| 96.4 MB 1.3 MB/s \n", + "\u001b[?25hRequirement already satisfied: spacy>=2.2.2 in /usr/local/lib/python3.7/dist-packages (from en_core_web_md==2.2.5) (2.2.4)\n", + "Requirement already satisfied: tqdm<5.0.0,>=4.38.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (4.41.1)\n", + "Requirement already satisfied: numpy>=1.15.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (1.19.5)\n", + "Requirement already satisfied: cymem<2.1.0,>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (2.0.5)\n", + "Requirement already satisfied: wasabi<1.1.0,>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (0.8.2)\n", + "Requirement already satisfied: catalogue<1.1.0,>=0.0.7 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (1.0.0)\n", + "Requirement already satisfied: thinc==7.4.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (7.4.0)\n", + "Requirement already satisfied: plac<1.2.0,>=0.9.6 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (1.1.3)\n", + "Requirement already satisfied: srsly<1.1.0,>=1.0.2 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (1.0.5)\n", + "Requirement already satisfied: requests<3.0.0,>=2.13.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (2.23.0)\n", + "Requirement already satisfied: setuptools in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (57.2.0)\n", + "Requirement already satisfied: preshed<3.1.0,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (3.0.5)\n", + "Requirement already satisfied: blis<0.5.0,>=0.4.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (0.4.1)\n", + "Requirement already satisfied: murmurhash<1.1.0,>=0.28.0 in /usr/local/lib/python3.7/dist-packages (from spacy>=2.2.2->en_core_web_md==2.2.5) (1.0.5)\n", + "Requirement already satisfied: importlib-metadata>=0.20 in /usr/local/lib/python3.7/dist-packages (from catalogue<1.1.0,>=0.0.7->spacy>=2.2.2->en_core_web_md==2.2.5) (4.6.1)\n", + "Requirement already satisfied: typing-extensions>=3.6.4 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata>=0.20->catalogue<1.1.0,>=0.0.7->spacy>=2.2.2->en_core_web_md==2.2.5) (3.7.4.3)\n", + "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from 
importlib-metadata>=0.20->catalogue<1.1.0,>=0.0.7->spacy>=2.2.2->en_core_web_md==2.2.5) (3.5.0)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.13.0->spacy>=2.2.2->en_core_web_md==2.2.5) (3.0.4)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.13.0->spacy>=2.2.2->en_core_web_md==2.2.5) (2.10)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.13.0->spacy>=2.2.2->en_core_web_md==2.2.5) (1.24.3)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests<3.0.0,>=2.13.0->spacy>=2.2.2->en_core_web_md==2.2.5) (2021.5.30)\n", + "\u001b[38;5;2m✔ Download and installation successful\u001b[0m\n", + "You can now load the model via spacy.load('en_core_web_md')\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "sJBbY7MjZaoZ" + }, + "source": [ + "import Levenshtein\n", + "import spacy\n", + "import numpy as np\n", + "import gzip\n", + "import en_core_web_md" + ], + "execution_count": 12, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "o3ZqmcslYIt7" + }, + "source": [ + "**Character length Ratio**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "MfVb0Qj9YQ3B" + }, + "source": [ + "def get_character_length_ratio(original_seq, target_seq):\n", + " \"\"\"\n", + " Return the ratio (in %) of the length of the target sequence\n", + " to the length of the original sequence.\n", + " \"\"\"\n", + " return (len(target_seq) / len(original_seq)) * 100" + ], + "execution_count": 13, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "X7md-JcUYGae" + }, + "source": [ + "**Replace-Only Levenshtein Similarity**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "5rpJMeEzYR-T" + }, + "source": [ + "def get_replace_only_levenshtein_similarity(original_seq, target_seq):\n", + " \"\"\"\n", + " Return the ratio (in %) of the Levenshtein distance between the target \n", + " sequence and the original sequence, where only replacements are considered.\n", + " \"\"\"\n", + " distance = len(\n", + " [\n", + " _\n", + " for operation, _, _ in Levenshtein.editops(original_seq, target_seq)\n", + " if operation == \"replace\"\n", + " ]\n", + " )\n", + " max_replace_only_distance = min(len(original_seq), len(target_seq))\n", + " if max_replace_only_distance == 0:\n", + " return 0\n", + " return (1 - (distance / max_replace_only_distance)) * 100" + ], + "execution_count": 14, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MfCOaryAX7-M" + }, + "source": [ + "**Word Frequency Ratio**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "yT3FjxJ8YX7P" + }, + "source": [ + "def yield_lines():\n", + " with gzip.open(\"./cc.en.300.vec.gz\", \"rt\") as f:\n", + " for line in f:\n", + " yield line.rstrip('\\n')\n", + "\n", + "def get_word2rank(vocab_size=60000):\n", + " word2rank = {}\n", + " line_generator = yield_lines()\n", + " next(line_generator)\n", + " for i, line in enumerate(line_generator):\n", + " if (i + 1) > vocab_size:\n", + " break\n", + " word = line.split(\" \")[0]\n", + " word2rank[word] = i\n", + " return word2rank\n", + "\n", + "def is_content_token(token):\n", + " return not token.is_stop and not token.is_punct and token.ent_type_ == '' # Not named entity\n", + "\n", + "def 
get_content_words(text, spacy_model):\n", + " spacy_tokenizer = spacy_model.Defaults.create_tokenizer(spacy_model)\n", + " spacy_content_tokens = [token for token in spacy_tokenizer(text) if is_content_token(token)]\n", + " return [token.text for token in spacy_content_tokens]\n", + "\n", + "def get_log_ranks(text, spacy_model, word2rank):\n", + " return [\n", + " np.log(1 + word2rank.get(word, len(word2rank)))\n", + " for word in get_content_words(text, spacy_model)\n", + " if word in word2rank\n", + " ]\n", + "\n", + "def get_word_rank_ratio(original_seq, target_seq, spacy_model, word2rank):\n", + " \"\"\"\n", + " Return the ratio (in %) of the word rank of the target sequence\n", + " to the word rank of the original sequence.\n", + " \"\"\" \n", + " orig_log_ranks = get_log_ranks(original_seq, spacy_model, word2rank)\n", + " target_log_ranks = get_log_ranks(target_seq, spacy_model, word2rank)\n", + " if len(orig_log_ranks) == 0:\n", + " orig_log_ranks = [np.log(1 + len(word2rank))]\n", + " if len(target_log_ranks) == 0:\n", + " target_log_ranks = [np.log(1 + len(word2rank))]\n", + " \n", + " orig_log_rank = np.quantile(orig_log_ranks, 0.75)\n", + " target_log_rank = np.quantile(target_log_ranks, 0.75)\n", + " \n", + " return (target_log_rank / orig_log_rank) * 100" + ], + "execution_count": 48, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "4H-HSX-nX2bh" + }, + "source": [ + "**Dependency Tree Depth Ratio**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "tCKmAOQtUrwC" + }, + "source": [ + "def get_subtree_depth(node):\n", + " if len(list(node.children)) == 0:\n", + " return 0\n", + " return 1 + max([get_subtree_depth(child) for child in node.children])\n", + "\n", + "\n", + "def get_dependency_tree_depth_ratio(original_seq, target_seq, model):\n", + " \"\"\"\n", + " Return the ratio (in %) of the depth of the dependency tree of the target \n", + " sequence to the depth of the dependency tree of the original sequence.\n", + " \"\"\"\n", + " original_tree_depths = [\n", + " get_subtree_depth(spacy_sentence.root)\n", + " for spacy_sentence in model(str(original_seq)).sents\n", + " ]\n", + " target_tree_depths = [\n", + " get_subtree_depth(spacy_sentence.root)\n", + " for spacy_sentence in model(str(target_seq)).sents\n", + " ]\n", + " original_tree_depth = 0 if len(original_tree_depths) == 0 else max(original_tree_depths)\n", + " target_tree_depth = 0 if len(target_tree_depths) == 0 else max(target_tree_depths)\n", + "\n", + " return 0 if original_tree_depth == 0 else (target_tree_depth / original_tree_depth) * 100" + ], + "execution_count": 44, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "659_m7_pwboN" + }, + "source": [ + "**Prepend the paraphrases with Control Tokens**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "vHfM0rw4wa8m" + }, + "source": [ + "def prepend_control_tokens(paraphrases):\n", + " \"\"\"\n", + " Return the list of paraphrases where each original sequence is prepended\n", + " by the control tokens.\n", + " \"\"\"\n", + " spacy_model = en_core_web_md.load()\n", + " word2rank = get_word2rank(vocab_size=60000)\n", + " final_pps = []\n", + " for orig_seq, target_seq in paraphrases:\n", + " tokens = []\n", + " tokens.append(get_character_length_ratio(orig_seq, target_seq))\n", + " tokens.append(get_replace_only_levenshtein_similarity(orig_seq, target_seq))\n", + " tokens.append(get_word_rank_ratio(orig_seq, target_seq, spacy_model, word2rank))\n", + " 
tokens.append(get_dependency_tree_depth_ratio(orig_seq, target_seq, spacy_model))\n", + " \n", + " # Round the ratios in a fixed interval of 0.05 (5%) and capped to \n", + " # a maximum ratio of 2 (200%)\n", + " mod_tokens = []\n", + " for token in tokens:\n", + " token = round(token / 5) * 5\n", + " token = min(max(5, token), 200)\n", + " mod_tokens.append(token)\n", + "\n", + " CTRL_TOKEN = \" \".format(\n", + " mod_tokens[0], mod_tokens[1], mod_tokens[2], mod_tokens[3]\n", + " )\n", + " orig_seq = CTRL_TOKEN + orig_seq\n", + " final_pps.append((orig_seq, target_seq))\n", + " \n", + " return final_pps\n", + "\n", + "\n", + "def prepend_control_tokens_for_inference(sentence, tokens):\n", + " \"\"\"\n", + " Return the sentence encoded with the control tokens for inference\n", + " \"\"\"\n", + " CTRL_TOKEN = \" \".format(\n", + " tokens[0], tokens[1], tokens[2], tokens[3]\n", + " )\n", + " return CTRL_TOKEN + sentence" + ], + "execution_count": 52, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "9ySYiYm7Zw2Q" + }, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "h3ZtlqW0Uu7r" + }, + "source": [ + "### Leveraging Unsupervised Pretraining" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "fP3kV1QUw9i2", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "2829d073-a22d-4d92-b1be-b3a8934bed6b" + }, + "source": [ + "!pip install transformers" + ], + "execution_count": 18, + "outputs": [ + { + "output_type": "stream", + "text": [ + "Requirement already satisfied: transformers in /usr/local/lib/python3.7/dist-packages (4.9.1)\n", + "Requirement already satisfied: importlib-metadata in /usr/local/lib/python3.7/dist-packages (from transformers) (4.6.1)\n", + "Requirement already satisfied: numpy>=1.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (1.19.5)\n", + "Requirement already satisfied: tokenizers<0.11,>=0.10.1 in /usr/local/lib/python3.7/dist-packages (from transformers) (0.10.3)\n", + "Requirement already satisfied: pyyaml>=5.1 in /usr/local/lib/python3.7/dist-packages (from transformers) (5.4.1)\n", + "Requirement already satisfied: tqdm>=4.27 in /usr/local/lib/python3.7/dist-packages (from transformers) (4.41.1)\n", + "Requirement already satisfied: packaging in /usr/local/lib/python3.7/dist-packages (from transformers) (21.0)\n", + "Requirement already satisfied: huggingface-hub==0.0.12 in /usr/local/lib/python3.7/dist-packages (from transformers) (0.0.12)\n", + "Requirement already satisfied: requests in /usr/local/lib/python3.7/dist-packages (from transformers) (2.23.0)\n", + "Requirement already satisfied: sacremoses in /usr/local/lib/python3.7/dist-packages (from transformers) (0.0.35)\n", + "Requirement already satisfied: regex!=2019.12.17 in /usr/local/lib/python3.7/dist-packages (from transformers) (2019.12.20)\n", + "Requirement already satisfied: filelock in /usr/local/lib/python3.7/dist-packages (from transformers) (3.0.12)\n", + "Requirement already satisfied: typing-extensions in /usr/local/lib/python3.7/dist-packages (from huggingface-hub==0.0.12->transformers) (3.7.4.3)\n", + "Requirement already satisfied: pyparsing>=2.0.2 in /usr/local/lib/python3.7/dist-packages (from packaging->transformers) (2.4.7)\n", + "Requirement already satisfied: zipp>=0.5 in /usr/local/lib/python3.7/dist-packages (from importlib-metadata->transformers) (3.5.0)\n", + "Requirement already satisfied: chardet<4,>=3.0.2 in /usr/local/lib/python3.7/dist-packages (from 
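The `CTRL_TOKEN` format strings in the two functions above appear to have lost their contents (the angle-bracket tokens were probably stripped as markup, leaving only a single-space string to be formatted). Judging from the special tokens registered in `get_tokenizer` below, the intended template is presumably along these lines; the token order follows the order in which the ratios are appended above, but it remains an assumption:

```python
# Hedged reconstruction of the stripped control-token template, based on the
# <NbChars_x%>/<LevSim_x%>/<WordFreq_x%>/<DepTreeDepth_x%> tokens that
# get_tokenizer() adds to the vocabulary.
CTRL_TOKEN_TEMPLATE = "<NbChars_{}%> <LevSim_{}%> <WordFreq_{}%> <DepTreeDepth_{}%> "

def build_control_prefix(nb_chars, lev_sim, word_freq, dep_tree_depth):
    """Format already-rounded control values (multiples of 5, capped at 200) into a prefix."""
    return CTRL_TOKEN_TEMPLATE.format(nb_chars, lev_sim, word_freq, dep_tree_depth)

# build_control_prefix(65, 70, 100, 80)
# -> '<NbChars_65%> <LevSim_70%> <WordFreq_100%> <DepTreeDepth_80%> '
```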
requests->transformers) (3.0.4)\n", + "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2021.5.30)\n", + "Requirement already satisfied: idna<3,>=2.5 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (2.10)\n", + "Requirement already satisfied: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /usr/local/lib/python3.7/dist-packages (from requests->transformers) (1.24.3)\n", + "Requirement already satisfied: joblib in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (1.0.1)\n", + "Requirement already satisfied: click in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (7.1.2)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.7/dist-packages (from sacremoses->transformers) (1.15.0)\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "RH12F6pMxM1t" + }, + "source": [ + "import torch\n", + "\n", + "from torch.utils.data import DataLoader, Dataset\n", + "from transformers import BartTokenizerFast, BartForConditionalGeneration\n", + "from transformers import DataCollatorForSeq2Seq, Seq2SeqTrainingArguments, Seq2SeqTrainer" + ], + "execution_count": 19, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "u2C7ZP-fxP-t" + }, + "source": [ + "class PPDataset(Dataset):\n", + " \"\"\"\n", + " Custom dataset class for paraphrase generation\n", + " \"\"\"\n", + " def __init__(self, data, tokenizer):\n", + " self.encodings = {'input_ids': [], 'labels': []}\n", + " for pp in data:\n", + " source, target = pp\n", + " source = tokenizer.encode(source)\n", + " with tokenizer.as_target_tokenizer():\n", + " target = tokenizer.encode(target)\n", + " self.encodings['input_ids'].append(source)\n", + " self.encodings['labels'].append(target)\n", + " \n", + " def __len__(self):\n", + " return len(self.encodings['input_ids'])\n", + " \n", + " def __getitem__(self, index):\n", + " item = {key: val[index] for key, val in self.encodings.items()}\n", + " return item" + ], + "execution_count": 20, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "XlDFoyi-c-Wf" + }, + "source": [ + "def get_tokenizer():\n", + " \"\"\"\n", + " Return the pretrained BART tokenizer and add the control tokens to it\n", + " \"\"\"\n", + " tokenizer = BartTokenizerFast.from_pretrained('facebook/bart-large')\n", + " control_tokens = []\n", + " for token in ['NbChars', 'LevSim', 'WordFreq', 'DepTreeDepth']:\n", + " for i in range(5, 201, 5):\n", + " control_tokens.append(f'<{token}_{i}%>')\n", + " tokenizer.add_tokens(control_tokens)\n", + "\n", + " return tokenizer\n", + "\n", + "\n", + "def get_dataset(paraphrases, tokenizer):\n", + " \"\"\"\n", + " Create a dataset from the paraphrases\n", + " \"\"\"\n", + " dataset = PPDataset(paraphrases, tokenizer)\n", + " return dataset\n", + "\n", + "\n", + "def get_model(vocab_size):\n", + " \"\"\"\n", + " Return the pretrained BART model and add fix the token embeddings matrix\n", + " \"\"\"\n", + " model = BartForConditionalGeneration.from_pretrained('facebook/bart-large')\n", + " model.resize_token_embeddings(vocab_size)\n", + " return model\n", + "\n", + "\n", + "def get_training_arguments(epochs=10, batch_size=8):\n", + " \"\"\"\n", + " Rturn the training arguments\n", + " \"\"\"\n", + " args = Seq2SeqTrainingArguments(\n", + " output_dir = 'outputs',\n", + " learning_rate = 3e-5,\n", + " per_device_train_batch_size = batch_size,\n", + " weight_decay = 0.01,\n", + " 
num_train_epochs = epochs,\n", + " predict_with_generate = True\n", + " )\n", + " return args\n", + "\n", + "\n", + "def get_data_collator(tokenizer, model):\n", + " \"\"\"\n", + " Return the data collator for seq2seq model\n", + " \"\"\"\n", + " data_collator = DataCollatorForSeq2Seq(tokenizer, model=model)\n", + " return data_collator\n", + "\n", + "\n", + "def get_trainer(model, tokenizer, dataset, data_collator, training_arguments):\n", + " \"\"\"\n", + " Return the trainer for fine-tuning the pretrained BART model\n", + " \"\"\"\n", + " trainer = Seq2SeqTrainer(\n", + " model = model,\n", + " data_collator = data_collator,\n", + " args = training_arguments,\n", + " train_dataset = dataset,\n", + " tokenizer = tokenizer,\n", + " )\n", + " return trainer\n", + "\n", + "\n", + "def simplify(sentence, tokenizer, model):\n", + " \"\"\"\n", + " Return the simplified sentence\n", + " \"\"\"\n", + " tokenized_sentence = tokenizer.encode(sentence, return_tensors='pt')\n", + " output = model.generate(tokenized_sentence, num_beams=5)\n", + " output = [tokenizer.decode(g, skip_special_tokens=True, clean_up_tokenization_spaces=False) for g in output]\n", + " return output\n", + "\n", + "\n", + "def decode_sentence(encoded_sentence, tokenizer):\n", + " \"\"\"\n", + " Decode the encoded sentence\n", + " \"\"\"\n", + " decoded_sentence = tokenizer.decode(encoded_sentence, skip_special_tokens=True)\n", + " return decoded_sentence" + ], + "execution_count": 60, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "X52218GLZyU6" + }, + "source": [ + "---" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "eh1538ZJU4if" + }, + "source": [ + "### Training in action" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "bhl3BOZULd4J" + }, + "source": [ + "**Mine Paraphrases**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ml5L5usTLVah" + }, + "source": [ + "data = get_data()" + ], + "execution_count": 33, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "t3MEGrPxLjhg" + }, + "source": [ + "sentences = []\n", + "for doc in data:\n", + " sentences += doc_to_sentences(doc)\n", + "\n", + "sentences = filter_sentences(sentences, punc_ratio=0.1, lang_model=None, lang_prob=0.5)\n", + "sentences.append('This paragraph is tough to comprehend.')\n", + "sentences.append('This paragraph is very hard to understand.')\n", + "sequences = generate_sequences(sentences, max_chars=300)" + ], + "execution_count": 34, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "aud7khFSNpPU" + }, + "source": [ + "embeddings = np.ascontiguousarray(compute_embeddings(sequences, dim=512))" + ], + "execution_count": 36, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "Zh03ARKhz0fO" + }, + "source": [ + "emb_index = index_embeddings(embeddings)" + ], + "execution_count": 37, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "B-FdGLEUNm3K" + }, + "source": [ + "D, I = get_nearest_neighbors(emb_index, embeddings, k=8)\n", + "filtered_I = filter_nearest_neighbors(D, I, max_L2_dist=0.05)\n", + "filtered_I = filter_paraphrases(filtered_I, sequences, min_l_dist=0.2)\n", + "paraphrases = generate_aligned_paraphrases(filtered_I, sequences)" + ], + "execution_count": 38, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "7qssIpJ11XIv", + "outputId": "dbff7fed-98d8-4103-f9fb-7ac12d941a8e" + }, + "source": [ + 
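The cell below prints `final_pps`, which is never assigned in the cells shown; presumably it is the result of `prepend_control_tokens(paraphrases)`. Under that assumption, a minimal sketch of how the pieces defined above would be wired together for fine-tuning and inference (not actually run here since, as noted at the top, full training is out of reach on Colab):

```python
# End-to-end sketch using the helpers defined above; assumes final_pps comes
# from prepend_control_tokens(paraphrases) and that enough GPU memory is available.
final_pps = prepend_control_tokens(paraphrases)

tokenizer = get_tokenizer()
dataset = get_dataset(final_pps, tokenizer)
model = get_model(vocab_size=len(tokenizer))   # embeddings resized for the new control tokens
args = get_training_arguments(epochs=10, batch_size=8)
collator = get_data_collator(tokenizer, model)
trainer = get_trainer(model, tokenizer, dataset, collator, args)
trainer.train()

# Inference: pick target control values and prepend them to the input sentence.
sentence = "This paragraph is tough to comprehend."
encoded = prepend_control_tokens_for_inference(sentence, [95, 75, 100, 80])
print(simplify(encoded, tokenizer, model))
```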
"print(len(final_pps))\n", + "for source, target in final_pps:\n", + " print('Simple:', source)\n", + " print('Complex:', target, '\\n')" + ], + "execution_count": 67, + "outputs": [ + { + "output_type": "stream", + "text": [ + "72\n", + "Simple: The term Hindu was later used in some Sanskrit texts such as the later Rajataranginis of Kashmir (Hinduka, c. 1450) and some 16th- to 18th-century Bengali Gaudiya Vaishnava texts including Chaitanya Charitamrita and Chaitanya Bhagavata.\n", + "Complex: The term Hindu was later used occasionally in some Sanskrit texts such as the later Rajataranginis of Kashmir (Hinduka, c. 1450) and some 16th- to 18th-century Bengali Gaudiya Vaishnava texts, including Chaitanya Charitamrita and Chaitanya Bhagavata. \n", + "\n", + "Simple: These texts used to distinguish Hindus from Muslims who are called Yavanas (foreigners) or Mlecchas (barbarians), with the 16th-century Chaitanya Charitamrita text and the 17th century Bhakta Mala text using the phrase \"Hindu dharma\".\n", + "Complex: These texts used it to contrast Hindus from Muslims who are called Yavanas (foreigners) or Mlecchas (barbarians), with the 16th-century Chaitanya Charitamrita text and the 17th-century Bhakta Mala text using the phrase \"Hindu dharma\". \n", + "\n", + "Simple: The weak overcomes the stronger by Dharma, as over a king. Truly that Dharma is the Truth (Satya); Therefore, when a man speaks the Truth, they say, \"He speaks the Dharma\"; and if he speaks Dharma, they say, \"He speaks the Truth!\"\n", + "Complex: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. \n", + "\n", + "Simple: Truly that Dharma is the Truth (Satya); Therefore, when a man speaks the Truth, they say, \"He speaks the Dharma\"; and if he speaks Dharma, they say, \"He speaks the Truth!\"\n", + "Complex: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. \n", + "\n", + "Simple: The soul is believed to be eternal.\n", + "Complex: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. \n", + "\n", + "Simple: The soul is believed to be eternal.\n", + "Complex: No one should compare themselves to them.\" \n", + "\n", + "Simple: The soul is believed to be eternal.\n", + "Complex: who also have their own messes. \n", + "\n", + "Simple: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God.\n", + "Complex: The weak overcomes the stronger by Dharma, as over a king. 
Truly that Dharma is the Truth (Satya); Therefore, when a man speaks the Truth, they say, \"He speaks the Dharma\"; and if he speaks Dharma, they say, \"He speaks the Truth!\" \n", + "\n", + "Simple: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God.\n", + "Complex: Truly that Dharma is the Truth (Satya); Therefore, when a man speaks the Truth, they say, \"He speaks the Dharma\"; and if he speaks Dharma, they say, \"He speaks the Truth!\" \n", + "\n", + "Simple: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God.\n", + "Complex: The soul is believed to be eternal. \n", + "\n", + "Simple: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God.\n", + "Complex: No one should compare themselves to them.\" \n", + "\n", + "Simple: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God.\n", + "Complex: who also have their own messes. \n", + "\n", + "Simple: They are two of the most decorated football players ever, having won a combined 66 trophies (Ronaldo 32, Messi 34) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season.\n", + "Complex: They are two of the most decorated football players ever, having won a combined 67 trophies (Ronaldo 32, Messi 35) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season. \n", + "\n", + "Simple: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide.\n", + "Complex: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. In total, Messi and Ronaldo reached the podium a record twelve times each.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times. \n", + "\n", + "Simple: Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes thrice each (2011, 2014 and 2015).\n", + "Complex: Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes four times each (2008, 2011, 2014 and 2015). 
\n", + "\n", + "Simple: I think they have their own personal pride in terms of wanting to be the best.\" Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\n", + "Complex: I think they have their own personal pride in terms of wanting to be the best.\" Messi himself denied any rivalry, saying that it was \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\" \n", + "\n", + "Simple: Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\"\n", + "Complex: It was a little hard for him to win trophies, but he gave La Liga importance.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\"\n", + "Complex: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: --- == Awards and records == Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\", and \"remarkable\", respectively.\n", + "Complex: == Awards and records == Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\" and \"remarkable\". \n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level.\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. \n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level.\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\" \n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. 
Nobody has the right to compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\" \n", + "\n", + "Simple: Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named joint-best player and top scorer.\n", + "Complex: Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named best player and joint top scorer. \n", + "\n", + "Simple: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide.\n", + "Complex: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide. \n", + "\n", + "Simple: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide.\n", + "Complex: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. In total, Messi and Ronaldo reached the podium a record twelve times each. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. 
In total, Messi and Ronaldo reached the podium a record twelve times each. \n", + "\n", + "Simple: == Relationship between Messi and Ronaldo == In a 2015 interview, Ronaldo talked about the rivalry. He said: \"Sometimes we push each other a little bit, that's why the competition is so good.\n", + "Complex: == Relationship between Messi and Ronaldo == In a 2015 interview, Ronaldo commented on the rivalry by saying: \"I think we push each other sometimes in the competition, this is why the competition is so high.\" \n", + "\n", + "Simple: It was a little hard for him to win trophies, but he gave La Liga importance.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\"\n", + "Complex: Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: It was a little hard for him to win trophies, but he gave La Liga importance.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\"\n", + "Complex: Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\"\n", + "Complex: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\"\n", + "Complex: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo.\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. 
\n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo.\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. \n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. Nobody has the right to compare themselves to them.\" \n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. Nobody has the right to compare themselves to them.\" \n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. \n", + "\n", + "Simple: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. \n", + "\n", + "Simple: No one should compare themselves to them.\"\n", + "Complex: who also have their own messes. \n", + "\n", + "Simple: No one should compare themselves to them.\"\n", + "Complex: The soul is believed to be eternal. \n", + "\n", + "Simple: No one should compare themselves to them.\"\n", + "Complex: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. \n", + "\n", + "Simple: He has also won four national cups, two league cups, six national super cups, and four FIFA Club World Cups.\n", + "Complex: He has also won four national cups, two league cups, six national super cups, two European Super Cups, and four FIFA Club World Cups. 
\n", + "\n", + "Simple: who also have their own messes.\n", + "Complex: No one should compare themselves to them.\" \n", + "\n", + "Simple: who also have their own messes.\n", + "Complex: The soul is believed to be eternal. \n", + "\n", + "Simple: who also have their own messes.\n", + "Complex: 'Sins' and evil-doings of the devotee are said to fall away of their own accord, the devotee shriven, limitedness even transcended, through the love of God. \n", + "\n", + "Simple: He is the first footballer and the third sportsman to earn $1 billion in their career.\n", + "Complex: He is the first footballer and only the third sportsman to earn $1 billion in their career. \n", + "\n", + "Simple: I think they have their own personal pride in terms of wanting to be the best.\" Messi himself denied any rivalry, saying that it was \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\"\n", + "Complex: I think they have their own personal pride in terms of wanting to be the best.\" Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano. \n", + "\n", + "Simple: I think they have their own personal pride in terms of wanting to be the best.\" Messi himself denied any rivalry, saying that it was \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\"\n", + "Complex: I think they have their own personal pride in terms of wanting to be the best.\" Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano. \n", + "\n", + "Simple: He is the first footballer and only the third sportsman to earn $1 billion in their career.\n", + "Complex: He is the first footballer and the third sportsman to earn $1 billion in their career. \n", + "\n", + "Simple: They are two of the most decorated football players ever, having won a combined 67 trophies (Ronaldo 32, Messi 35) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season.\n", + "Complex: They are two of the most decorated football players ever, having won a combined 66 trophies (Ronaldo 32, Messi 34) during their senior careers thus far, and have regularly broken the 50-goal barrier in a single season. \n", + "\n", + "Simple: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's physical attributes, goalscoring skills, leadership and influence under pressure is well-appreciated worldwide.\n", + "Complex: Messi's combination of dribbling, technical, playmaking, and goalscoring skills is often considered among the very best in history, while Ronaldo's leadership and influence under pressure is well-appreciated worldwide. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. \n", + "\n", + "Simple: In 2019, Messi took the lead again by earning his sixth Ballon d'Or, finishing just seven points ahead of second-placed Virgil van Dijk, with Ronaldo finishing third. 
In total, Messi and Ronaldo reached the podium a record twelve times each.\n", + "Complex: In 2019, Messi took the lead again by earning his sixth Ballon d'Or. He finished just seven points ahead of second-placed Virgil van Dijk, with Ronaldo third. In total, Messi and Ronaldo have both won the World Cup twelve times. \n", + "\n", + "Simple: Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes four times each (2008, 2011, 2014 and 2015).\n", + "Complex: Messi during this period won five Pichichi trophies and European Golden Shoe awards (2010, 2012, 2013, 2017 and 2018), while Ronaldo won these prizes thrice each (2011, 2014 and 2015). \n", + "\n", + "Simple: == Relationship between Messi and Ronaldo == In a 2015 interview, Ronaldo commented on the rivalry by saying: \"I think we push each other sometimes in the competition, this is why the competition is so high.\"\n", + "Complex: == Relationship between Messi and Ronaldo == In a 2015 interview, Ronaldo talked about the rivalry. He said: \"Sometimes we push each other a little bit, that's why the competition is so good. \n", + "\n", + "Simple: I think they have their own personal pride in terms of wanting to be the best.\" Messi has denied any rivalry, and blames the media for creating it, stating that \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\n", + "Complex: I think they have their own personal pride in terms of wanting to be the best.\" Messi himself denied any rivalry, saying that it was \"only the media, the press, who wants us to be at loggerheads but I've never fought with Cristiano.\" \n", + "\n", + "Simple: Although it was a bit difficult to see him win trophies, he gave La Liga prestige.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\"\n", + "Complex: It was a little hard for him to win trophies, but he gave La Liga importance.\" During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi later replied: \"If I get an invitation, why not?\"\n", + "Complex: During a joint interview at the UEFA Player of the Year ceremony in 2019, Ronaldo said he would like to \"have dinner together in the future\", to which Messi said: \"If I get an invitation, why not?\" \n", + "\n", + "Simple: == Awards and records == Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\" and \"remarkable\".\n", + "Complex: --- == Awards and records == Throughout the existence of the rivalry, the pair have dominated awards ceremonies and broken a multitude of goalscoring records for both club and country, feats which have been described as \"incredible\", \"ridiculous\", and \"remarkable\", respectively. 
\n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level.\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. \n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level.\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\" \n", + "\n", + "Simple: In an interview for the France Football, Modrić stated that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Cristiano Ronaldo and Lionel Messi, who are players at another level. Nobody has the right to compare themselves to them.\"\n", + "Complex: In an interview for the France Football, Modrić said that \"history will say that a Croatian player, representing his small country, won the Ballon d'Or after Lionel Messi and Cristiano Ronaldo. No one should compare themselves to them.\" \n", + "\n", + "Simple: He has also won four national cups, two league cups, six national super cups, two European Super Cups, and four FIFA Club World Cups.\n", + "Complex: He has also won four national cups, two league cups, six national super cups, and four FIFA Club World Cups. \n", + "\n", + "Simple: Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named best player and joint top scorer.\n", + "Complex: Moreover, Messi was runner-up at three Copa Américas and at the 2014 World Cup, before finally claiming his first major international trophy at the 2021 Copa América where he was named joint-best player and top scorer. \n", + "\n", + "Simple: The term Hindu was later used occasionally in some Sanskrit texts such as the later Rajataranginis of Kashmir (Hinduka, c. 1450) and some 16th- to 18th-century Bengali Gaudiya Vaishnava texts, including Chaitanya Charitamrita and Chaitanya Bhagavata.\n", + "Complex: The term Hindu was later used in some Sanskrit texts such as the later Rajataranginis of Kashmir (Hinduka, c. 1450) and some 16th- to 18th-century Bengali Gaudiya Vaishnava texts including Chaitanya Charitamrita and Chaitanya Bhagavata. \n", + "\n", + "Simple: These texts used it to contrast Hindus from Muslims who are called Yavanas (foreigners) or Mlecchas (barbarians), with the 16th-century Chaitanya Charitamrita text and the 17th-century Bhakta Mala text using the phrase \"Hindu dharma\".\n", + "Complex: These texts used to distinguish Hindus from Muslims who are called Yavanas (foreigners) or Mlecchas (barbarians), with the 16th-century Chaitanya Charitamrita text and the 17th century Bhakta Mala text using the phrase \"Hindu dharma\". \n", + "\n", + "Simple: This paragraph is tough to comprehend.\n", + "Complex: This paragraph is very hard to understand. 
\n", + "\n", + "Simple: This paragraph is very hard to understand.\n", + "Complex: This paragraph is tough to comprehend. \n", + "\n" + ], + "name": "stdout" + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0JVgbXvwNtu1" + }, + "source": [ + "**Add Control Tokens**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "ugItZ0s2N4k_" + }, + "source": [ + "final_pps = prepend_control_tokens(paraphrases)" + ], + "execution_count": 53, + "outputs": [] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "zIMH3TFBXEtL" + }, + "source": [ + "**Fine-tune BART for Text Simplification**" + ] + }, + { + "cell_type": "code", + "metadata": { + "id": "pxiIFuabXaCF" + }, + "source": [ + "tokenizer = get_tokenizer()" + ], + "execution_count": 55, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "mjHimSPLU-il" + }, + "source": [ + "dataset = get_dataset(final_pps, tokenizer)" + ], + "execution_count": 56, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "M44IGfhEXfhl" + }, + "source": [ + "model = get_model(len(tokenizer))" + ], + "execution_count": 57, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "id": "8QSEHQb2XRwo" + }, + "source": [ + "data_collator = get_data_collator(tokenizer, model)\n", + "args = get_training_arguments(epochs=1, batch_size=8)\n", + "trainer = get_trainer(model, tokenizer, dataset, data_collator, args)" + ], + "execution_count": null, + "outputs": [] + }, + { + "cell_type": "code", + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 312 + }, + "id": "daAnf0xdK844", + "outputId": "6d3cdc8d-a19e-4f5a-fbaa-d17015beef41" + }, + "source": [ + "trainer.train()" + ], + "execution_count": 65, + "outputs": [ + { + "output_type": "stream", + "text": [ + "***** Running training *****\n", + "  Num examples = 72\n", + "  Num Epochs = 1\n", + "  Instantaneous batch size per device = 8\n", + "  Total train batch size (w. parallel, distributed & accumulation) = 8\n", + "  Gradient Accumulation steps = 1\n", + "  Total optimization steps = 9\n" + ], + "name": "stderr" + }, + { + "output_type": "display_data", + "data": { + "text/html": [ + "\n", + "
[9/9 04:07, Epoch 1/1]  Step | Training Loss  (HTML training-progress table)
" + ], + "text/plain": [ + "" + ] + }, + "metadata": { + "tags": [] + } + }, + { + "output_type": "stream", + "text": [ + "\n", + "\n", + "Training completed. Do not forget to share your model on huggingface.co/models =)\n", + "\n", + "\n" + ], + "name": "stderr" + }, + { + "output_type": "execute_result", + "data": { + "text/plain": [ + "TrainOutput(global_step=9, training_loss=0.9259072409735786, metrics={'train_runtime': 278.6859, 'train_samples_per_second': 0.258, 'train_steps_per_second': 0.032, 'total_flos': 20367397158912.0, 'train_loss': 0.9259072409735786, 'epoch': 1.0})" + ] + }, + "metadata": { + "tags": [] + }, + "execution_count": 65 + } + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gVgdRbyBZ00U" + }, + "source": [ + "---" + ] + } + ] +} diff --git a/T4-Project_Report.pdf b/T4-Project_Report.pdf new file mode 100644 index 0000000..b63cb32 Binary files /dev/null and b/T4-Project_Report.pdf differ