diff --git a/module-1/map-reduce-filter/your-code/main.ipynb b/module-1/map-reduce-filter/your-code/main.ipynb index 51d50b0d..8fb147e4 100644 --- a/module-1/map-reduce-filter/your-code/main.ipynb +++ b/module-1/map-reduce-filter/your-code/main.ipynb @@ -12,11 +12,15 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 75, "metadata": {}, "outputs": [], "source": [ - "# Import reduce from functools, numpy and pandas" + "# Import reduce from functools, numpy and pandas\n", + "\n", + "from functools import reduce\n", + "import numpy as np\n", + "import pandas as pd\n" ] }, { @@ -32,7 +36,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 76, "metadata": {}, "outputs": [], "source": [ @@ -43,6 +47,26 @@ " prophet = f.read().split(' ')" ] }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\ufeffThe'" + ] + }, + "execution_count": 77, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prophet[0]" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -54,11 +78,33 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 78, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "\n", + "prophet = prophet[568:]\n" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'PROPHET\\n\\n|Almustafa,'" + ] + }, + "execution_count": 79, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prophet[0]" ] }, { @@ -70,11 +116,1052 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 80, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "['PROPHET\\n\\n|Almustafa,',\n", + " 'the{7}',\n", + " 'chosen',\n", + " 'and',\n", + " 'the\\nbeloved,',\n", + " 'who',\n", + " 'was',\n", + " 'a',\n", + " 'dawn',\n", + " 'unto']" + ] + 
}, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# your code here\n", + "prophet[0:10]" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['PROPHET\\n\\n|Almustafa,',\n", + " 'the{7}',\n", + " 'chosen',\n", + " 'and',\n", + " 'the\\nbeloved,',\n", + " 'who',\n", + " 'was',\n", + " 'a',\n", + " 'dawn',\n", + " 'unto',\n", + " 'his',\n", + " 'own\\nday,',\n", + " 'had',\n", + " 'waited',\n", + " 'twelve',\n", + " 'years',\n", + " 'in',\n", + " 'the',\n", + " 'city\\nof',\n", + " 'Orphalese',\n", + " 'for',\n", + " 'his',\n", + " 'ship',\n", + " 'that',\n", + " 'was',\n", + " 'to\\nreturn',\n", + " 'and',\n", + " 'bear',\n", + " 'him',\n", + " 'back',\n", + " 'to',\n", + " 'the',\n", + " 'isle',\n", + " 'of\\nhis',\n", + " 'birth.\\n\\nAnd',\n", + " 'in',\n", + " 'the',\n", + " 'twelfth',\n", + " 'year,',\n", + " 'on',\n", + " 'the',\n", + " 'seventh\\nday',\n", + " 'of',\n", + " 'Ielool,',\n", + " 'the',\n", + " 'month',\n", + " 'of',\n", + " 'reaping,',\n", + " 'he\\nclimbed',\n", + " 'the',\n", + " 'hill',\n", + " 'without',\n", + " 'the',\n", + " 'city',\n", + " 'walls\\nand',\n", + " 'looked',\n", + " 'seaward;',\n", + " 'and',\n", + " 'he',\n", + " 'beheld',\n", + " 'his\\nship',\n", + " 'coming',\n", + " 'with',\n", + " 'the',\n", + " 'mist.\\n\\nThen',\n", + " 'the',\n", + " 'gates',\n", + " 'of',\n", + " 'his',\n", + " 'heart',\n", + " 'were',\n", + " 'flung\\nopen,',\n", + " 'and',\n", + " 'his',\n", + " 'joy',\n", + " 'flew',\n", + " 'far',\n", + " 'over',\n", + " 'the',\n", + " 'sea.\\nAnd',\n", + " 'he',\n", + " 'closed',\n", + " 'his',\n", + " 'eyes',\n", + " 'and',\n", + " 'prayed',\n", + " 'in',\n", + " 'the\\nsilences',\n", + " 'of',\n", + " 'his',\n", + " 'soul.\\n\\n*****\\n\\nBut',\n", + " 'as',\n", + " 'he',\n", + " 'descended',\n", + " 'the',\n", + " 'hill,',\n", + " 'a',\n", + " 'sadness\\ncame',\n", + " 
'upon',\n", + " 'him,',\n", + " 'and',\n", + " 'he',\n", + " 'thought',\n", + " 'in',\n", + " 'his\\nheart:\\n\\nHow',\n", + " 'shall',\n", + " 'I',\n", + " 'go',\n", + " 'in',\n", + " 'peace',\n", + " 'and',\n", + " 'without\\nsorrow?',\n", + " 'Nay,',\n", + " 'not',\n", + " 'without',\n", + " 'a',\n", + " 'wound',\n", + " 'in',\n", + " 'the\\nspirit',\n", + " 'shall',\n", + " 'I',\n", + " 'leave',\n", + " 'this',\n", + " 'city.',\n", + " '{8}Long\\nwere',\n", + " 'the',\n", + " 'days',\n", + " 'of',\n", + " 'pain',\n", + " 'I',\n", + " 'have',\n", + " 'spent\\nwithin',\n", + " 'its',\n", + " 'walls,',\n", + " 'and',\n", + " 'long',\n", + " 'were',\n", + " 'the\\nnights',\n", + " 'of',\n", + " 'aloneness;',\n", + " 'and',\n", + " 'who',\n", + " 'can',\n", + " 'depart\\nfrom',\n", + " 'his',\n", + " 'pain',\n", + " 'and',\n", + " 'his',\n", + " 'aloneness',\n", + " 'without\\nregret?\\n\\nToo',\n", + " 'many',\n", + " 'fragments',\n", + " 'of',\n", + " 'the',\n", + " 'spirit',\n", + " 'have',\n", + " 'I\\nscattered',\n", + " 'in',\n", + " 'these',\n", + " 'streets,',\n", + " 'and',\n", + " 'too',\n", + " 'many\\nare',\n", + " 'the',\n", + " 'children',\n", + " 'of',\n", + " 'my',\n", + " 'longing',\n", + " 'that',\n", + " 'walk\\nnaked',\n", + " 'among',\n", + " 'these',\n", + " 'hills,',\n", + " 'and',\n", + " 'I',\n", + " 'cannot\\nwithdraw',\n", + " 'from',\n", + " 'them',\n", + " 'without',\n", + " 'a',\n", + " 'burden',\n", + " 'and\\nan',\n", + " 'ache.\\n\\nIt',\n", + " 'is',\n", + " 'not',\n", + " 'a',\n", + " 'garment',\n", + " 'I',\n", + " 'cast',\n", + " 'off',\n", + " 'this\\nday,',\n", + " 'but',\n", + " 'a',\n", + " 'skin',\n", + " 'that',\n", + " 'I',\n", + " 'tear',\n", + " 'with',\n", + " 'my',\n", + " 'own\\nhands.\\n\\nNor',\n", + " 'is',\n", + " 'it',\n", + " 'a',\n", + " 'thought',\n", + " 'I',\n", + " 'leave',\n", + " 'behind',\n", + " 'me,\\nbut',\n", + " 'a',\n", + " 'heart',\n", + " 'made',\n", + " 'sweet',\n", + " 'with',\n", + " 
'hunger',\n", + " 'and\\nwith',\n", + " 'thirst.\\n\\n*****\\n\\nYet',\n", + " 'I',\n", + " 'cannot',\n", + " 'tarry',\n", + " 'longer.\\n\\nThe',\n", + " 'sea',\n", + " 'that',\n", + " 'calls',\n", + " 'all',\n", + " 'things',\n", + " 'unto',\n", + " 'her\\ncalls',\n", + " 'me,',\n", + " 'and',\n", + " 'I',\n", + " 'must',\n", + " 'embark.\\n\\nFor',\n", + " 'to',\n", + " 'stay,',\n", + " 'though',\n", + " 'the',\n", + " 'hours',\n", + " 'burn',\n", + " 'in\\nthe',\n", + " 'night,',\n", + " 'is',\n", + " 'to',\n", + " 'freeze',\n", + " 'and',\n", + " 'crystallize\\nand',\n", + " 'be',\n", + " 'bound',\n", + " 'in',\n", + " 'a',\n", + " 'mould.\\n\\nFain',\n", + " 'would',\n", + " 'I',\n", + " 'take',\n", + " 'with',\n", + " 'me',\n", + " 'all',\n", + " 'that',\n", + " 'is\\nhere.',\n", + " 'But',\n", + " 'how',\n", + " 'shall',\n", + " 'I?\\n\\nA',\n", + " 'voice',\n", + " 'cannot',\n", + " 'carry',\n", + " 'the',\n", + " 'tongue',\n", + " 'and\\n{9}the',\n", + " 'lips',\n", + " 'that',\n", + " 'gave',\n", + " 'it',\n", + " 'wings.',\n", + " 'Alone\\nmust',\n", + " 'it',\n", + " 'seek',\n", + " 'the',\n", + " 'ether.\\n\\nAnd',\n", + " 'alone',\n", + " 'and',\n", + " 'without',\n", + " 'his',\n", + " 'nest',\n", + " 'shall',\n", + " 'the\\neagle',\n", + " 'fly',\n", + " 'across',\n", + " 'the',\n", + " 'sun.\\n\\n*****\\n\\nNow',\n", + " 'when',\n", + " 'he',\n", + " 'reached',\n", + " 'the',\n", + " 'foot',\n", + " 'of',\n", + " 'the\\nhill,',\n", + " 'he',\n", + " 'turned',\n", + " 'again',\n", + " 'towards',\n", + " 'the',\n", + " 'sea,\\nand',\n", + " 'he',\n", + " 'saw',\n", + " 'his',\n", + " 'ship',\n", + " 'approaching',\n", + " 'the\\nharbour,',\n", + " 'and',\n", + " 'upon',\n", + " 'her',\n", + " 'prow',\n", + " 'the',\n", + " 'mariners,\\nthe',\n", + " 'men',\n", + " 'of',\n", + " 'his',\n", + " 'own',\n", + " 'land.\\n\\nAnd',\n", + " 'his',\n", + " 'soul',\n", + " 'cried',\n", + " 'out',\n", + " 'to',\n", + " 'them,',\n", + " 'and',\n", + " 
'he\\nsaid:\\n\\nSons',\n", + " 'of',\n", + " 'my',\n", + " 'ancient',\n", + " 'mother,',\n", + " 'you',\n", + " 'riders',\n", + " 'of\\nthe',\n", + " 'tides,\\n\\nHow',\n", + " 'often',\n", + " 'have',\n", + " 'you',\n", + " 'sailed',\n", + " 'in',\n", + " 'my',\n", + " 'dreams.\\nAnd',\n", + " 'now',\n", + " 'you',\n", + " 'come',\n", + " 'in',\n", + " 'my',\n", + " 'awakening,',\n", + " 'which\\nis',\n", + " 'my',\n", + " 'deeper',\n", + " 'dream.\\n\\nReady',\n", + " 'am',\n", + " 'I',\n", + " 'to',\n", + " 'go,',\n", + " 'and',\n", + " 'my',\n", + " 'eagerness',\n", + " 'with\\nsails',\n", + " 'full',\n", + " 'set',\n", + " 'awaits',\n", + " 'the',\n", + " 'wind.\\n\\nOnly',\n", + " 'another',\n", + " 'breath',\n", + " 'will',\n", + " 'I',\n", + " 'breathe',\n", + " 'in\\nthis',\n", + " 'still',\n", + " 'air,',\n", + " 'only',\n", + " 'another',\n", + " 'loving',\n", + " 'look\\ncast',\n", + " 'backward,\\n\\nAnd',\n", + " 'then',\n", + " 'I',\n", + " 'shall',\n", + " 'stand',\n", + " 'among',\n", + " 'you,',\n", + " 'a\\nseafarer',\n", + " 'among',\n", + " 'seafarers.',\n", + " '{10}And',\n", + " 'you,\\nvast',\n", + " 'sea,',\n", + " 'sleepless',\n", + " 'mother,\\n\\nWho',\n", + " 'alone',\n", + " 'are',\n", + " 'peace',\n", + " 'and',\n", + " 'freedom',\n", + " 'to',\n", + " 'the\\nriver',\n", + " 'and',\n", + " 'the',\n", + " 'stream,\\n\\nOnly',\n", + " 'another',\n", + " 'winding',\n", + " 'will',\n", + " 'this',\n", + " 'stream\\nmake,',\n", + " 'only',\n", + " 'another',\n", + " 'murmur',\n", + " 'in',\n", + " 'this',\n", + " 'glade,\\n\\nAnd',\n", + " 'then',\n", + " 'shall',\n", + " 'I',\n", + " 'come',\n", + " 'to',\n", + " 'you,',\n", + " 'a\\nboundless',\n", + " 'drop',\n", + " 'to',\n", + " 'a',\n", + " 'boundless',\n", + " 'ocean.\\n\\n*****\\n\\nAnd',\n", + " 'as',\n", + " 'he',\n", + " 'walked',\n", + " 'he',\n", + " 'saw',\n", + " 'from',\n", + " 'afar',\n", + " 'men\\nand',\n", + " 'women',\n", + " 'leaving',\n", + " 'their',\n", + " 
'fields',\n", + " 'and',\n", + " 'their\\nvineyards',\n", + " 'and',\n", + " 'hastening',\n", + " 'towards',\n", + " 'the',\n", + " 'city\\ngates.\\n\\nAnd',\n", + " 'he',\n", + " 'heard',\n", + " 'their',\n", + " 'voices',\n", + " 'calling',\n", + " 'his\\nname,',\n", + " 'and',\n", + " 'shouting',\n", + " 'from',\n", + " 'field',\n", + " 'to',\n", + " 'field\\ntelling',\n", + " 'one',\n", + " 'another',\n", + " 'of',\n", + " 'the',\n", + " 'coming',\n", + " 'of',\n", + " 'his\\nship.\\n\\nAnd',\n", + " 'he',\n", + " 'said',\n", + " 'to',\n", + " 'himself:\\n\\nShall',\n", + " 'the',\n", + " 'day',\n", + " 'of',\n", + " 'parting',\n", + " 'be',\n", + " 'the',\n", + " 'day',\n", + " 'of\\ngathering?\\n\\nAnd',\n", + " 'shall',\n", + " 'it',\n", + " 'be',\n", + " 'said',\n", + " 'that',\n", + " 'my',\n", + " 'eve',\n", + " 'was',\n", + " 'in\\ntruth',\n", + " 'my',\n", + " 'dawn?\\n\\nAnd',\n", + " 'what',\n", + " 'shall',\n", + " 'I',\n", + " 'give',\n", + " 'unto',\n", + " 'him',\n", + " 'who',\n", + " 'has\\nleft',\n", + " 'his',\n", + " 'plough',\n", + " 'in',\n", + " 'midfurrow,',\n", + " 'or',\n", + " 'to\\nhim',\n", + " 'who',\n", + " 'has',\n", + " 'stopped',\n", + " 'the',\n", + " 'wheel',\n", + " 'of',\n", + " 'his\\nwinepress?',\n", + " '{11}Shall',\n", + " 'my',\n", + " 'heart',\n", + " 'become',\n", + " 'a\\ntree',\n", + " 'heavy-laden',\n", + " 'with',\n", + " 'fruit',\n", + " 'that',\n", + " 'I',\n", + " 'may\\ngather',\n", + " 'and',\n", + " 'give',\n", + " 'unto',\n", + " 'them?\\n\\nAnd',\n", + " 'shall',\n", + " 'my',\n", + " 'desires',\n", + " 'flow',\n", + " 'like',\n", + " 'a\\nfountain',\n", + " 'that',\n", + " 'I',\n", + " 'may',\n", + " 'fill',\n", + " 'their',\n", + " 'cups?\\n\\nAm',\n", + " 'I',\n", + " 'a',\n", + " 'harp',\n", + " 'that',\n", + " 'the',\n", + " 'hand',\n", + " 'of',\n", + " 'the',\n", + " 'mighty\\nmay',\n", + " 'touch',\n", + " 'me,',\n", + " 'or',\n", + " 'a',\n", + " 'flute',\n", + " 'that',\n", + " 'his',\n", + " 
'breath\\nmay',\n", + " 'pass',\n", + " 'through',\n", + " 'me?\\n\\nA',\n", + " 'seeker',\n", + " 'of',\n", + " 'silences',\n", + " 'am',\n", + " 'I,',\n", + " 'and',\n", + " 'what\\ntreasure',\n", + " 'have',\n", + " 'I',\n", + " 'found',\n", + " 'in',\n", + " 'silences',\n", + " 'that',\n", + " 'I\\nmay',\n", + " 'dispense',\n", + " 'with',\n", + " 'confidence?\\n\\nIf',\n", + " 'this',\n", + " 'is',\n", + " 'my',\n", + " 'day',\n", + " 'of',\n", + " 'harvest,',\n", + " 'in',\n", + " 'what\\nfields',\n", + " 'have',\n", + " 'I',\n", + " 'sowed',\n", + " 'the',\n", + " 'seed,',\n", + " 'and',\n", + " 'in\\nwhat',\n", + " 'unremembered',\n", + " 'seasons?\\n\\nIf',\n", + " 'this',\n", + " 'indeed',\n", + " 'be',\n", + " 'the',\n", + " 'hour',\n", + " 'in',\n", + " 'which',\n", + " 'I\\nlift',\n", + " 'up',\n", + " 'my',\n", + " 'lantern,',\n", + " 'it',\n", + " 'is',\n", + " 'not',\n", + " 'my',\n", + " 'flame\\nthat',\n", + " 'shall',\n", + " 'burn',\n", + " 'therein.\\n\\nEmpty',\n", + " 'and',\n", + " 'dark',\n", + " 'shall',\n", + " 'I',\n", + " 'raise',\n", + " 'my',\n", + " 'lantern,\\n\\nAnd',\n", + " 'the',\n", + " 'guardian',\n", + " 'of',\n", + " 'the',\n", + " 'night',\n", + " 'shall',\n", + " 'fill\\nit',\n", + " 'with',\n", + " 'oil',\n", + " 'and',\n", + " 'he',\n", + " 'shall',\n", + " 'light',\n", + " 'it',\n", + " 'also.\\n\\n*****\\n\\nThese',\n", + " 'things',\n", + " 'he',\n", + " 'said',\n", + " 'in',\n", + " 'words.',\n", + " 'But',\n", + " 'much\\nin',\n", + " 'his',\n", + " 'heart',\n", + " 'remained',\n", + " 'unsaid.',\n", + " 'For',\n", + " '{12}he\\nhimself',\n", + " 'could',\n", + " 'not',\n", + " 'speak',\n", + " 'his',\n", + " 'deeper\\nsecret.\\n\\n*****\\n\\n[Illustration:',\n", + " '0020]\\n\\nAnd',\n", + " 'when',\n", + " 'he',\n", + " 'entered',\n", + " 'into',\n", + " 'the',\n", + " 'city',\n", + " 'all\\nthe',\n", + " 'people',\n", + " 'came',\n", + " 'to',\n", + " 'meet',\n", + " 'him,',\n", + " 'and',\n", + " 
'they\\nwere',\n", + " 'crying',\n", + " 'out',\n", + " 'to',\n", + " 'him',\n", + " 'as',\n", + " 'with',\n", + " 'one\\nvoice.\\n\\nAnd',\n", + " 'the',\n", + " 'elders',\n", + " 'of',\n", + " 'the',\n", + " 'city',\n", + " 'stood',\n", + " 'forth\\nand',\n", + " 'said:\\n\\nGo',\n", + " 'not',\n", + " 'yet',\n", + " 'away',\n", + " 'from',\n", + " 'us.\\n\\nA',\n", + " 'noontide',\n", + " 'have',\n", + " 'you',\n", + " 'been',\n", + " 'in',\n", + " 'our\\ntwilight,',\n", + " 'and',\n", + " 'your',\n", + " 'youth',\n", + " 'has',\n", + " 'given',\n", + " 'us\\ndreams',\n", + " 'to',\n", + " 'dream.\\n\\nNo',\n", + " 'stranger',\n", + " 'are',\n", + " 'you',\n", + " 'among',\n", + " 'us,',\n", + " 'nor\\na',\n", + " 'guest,',\n", + " 'but',\n", + " 'our',\n", + " 'son',\n", + " 'and',\n", + " 'our',\n", + " 'dearly\\nbeloved.\\n\\nSuffer',\n", + " 'not',\n", + " 'yet',\n", + " 'our',\n", + " 'eyes',\n", + " 'to',\n", + " 'hunger',\n", + " 'for\\nyour',\n", + " 'face.\\n\\n*****\\n\\nAnd',\n", + " 'the',\n", + " 'priests',\n", + " 'and',\n", + " 'the',\n", + " 'priestesses',\n", + " 'said\\nunto',\n", + " 'him:\\n\\nLet',\n", + " 'not',\n", + " 'the',\n", + " 'waves',\n", + " 'of',\n", + " 'the',\n", + " 'sea',\n", + " 'separate',\n", + " 'us\\nnow,',\n", + " 'and',\n", + " 'the',\n", + " 'years',\n", + " 'you',\n", + " 'have',\n", + " 'spent',\n", + " 'in',\n", + " 'our\\nmidst',\n", + " 'become',\n", + " 'a',\n", + " 'memory.\\n\\nYou',\n", + " 'have',\n", + " 'walked',\n", + " 'among',\n", + " 'us',\n", + " 'a',\n", + " 'spirit,\\n{13}and',\n", + " 'your',\n", + " 'shadow',\n", + " 'has',\n", + " 'been',\n", + " 'a',\n", + " 'light\\nupon',\n", + " 'our',\n", + " 'faces.\\n\\nMuch',\n", + " 'have',\n", + " 'we',\n", + " 'loved',\n", + " 'you.',\n", + " 'But',\n", + " 'speechless\\nwas',\n", + " 'our',\n", + " 'love,',\n", + " 'and',\n", + " 'with',\n", + " 'veils',\n", + " 'has',\n", + " 'it',\n", + " 'been\\nveiled.\\n\\nYet',\n", + " 'now',\n", + " 'it',\n", + 
" 'cries',\n", + " 'aloud',\n", + " 'unto',\n", + " 'you,',\n", + " 'and\\nwould',\n", + " 'stand',\n", + " 'revealed',\n", + " 'before',\n", + " 'you.\\n\\nAnd',\n", + " 'ever',\n", + " 'has',\n", + " 'it',\n", + " 'been',\n", + " 'that',\n", + " 'love',\n", + " 'knows\\nnot',\n", + " 'its',\n", + " 'own',\n", + " 'depth',\n", + " 'until',\n", + " 'the',\n", + " 'hour',\n", + " 'of\\nseparation.\\n\\n*****\\n\\nAnd',\n", + " 'others',\n", + " 'came',\n", + " 'also',\n", + " 'and',\n", + " 'entreated',\n", + " 'him.\\nBut',\n", + " 'he',\n", + " 'answered',\n", + " 'them',\n", + " 'not.',\n", + " 'He',\n", + " 'only',\n", + " 'bent\\nhis',\n", + " 'head;',\n", + " 'and',\n", + " 'those',\n", + " 'who',\n", + " 'stood',\n", + " 'near',\n", + " 'saw\\nhis',\n", + " 'tears',\n", + " 'falling',\n", + " 'upon',\n", + " 'his',\n", + " 'breast.\\n\\nAnd',\n", + " 'he',\n", + " 'and',\n", + " 'the',\n", + " 'people',\n", + " 'proceeded',\n", + " 'towards\\nthe',\n", + " 'great',\n", + " 'square',\n", + " 'before',\n", + " 'the',\n", + " 'temple.\\n\\nAnd',\n", + " 'there',\n", + " 'came',\n", + " 'out',\n", + " 'of',\n", + " 'the',\n", + " 'sanctuary',\n", + " 'a\\nwoman',\n", + " 'whose',\n", + " 'name',\n", + " 'was',\n", + " 'Almitra.',\n", + " 'And',\n", + " 'she\\nwas',\n", + " 'a',\n", + " 'seeress.\\n\\nAnd',\n", + " 'he',\n", + " 'looked',\n", + " 'upon',\n", + " 'her',\n", + " 'with',\n", + " 'exceeding\\ntenderness,',\n", + " 'for',\n", + " 'it',\n", + " 'was',\n", + " 'she',\n", + " 'who',\n", + " 'had',\n", + " 'first\\nsought',\n", + " 'and',\n", + " 'believed',\n", + " 'in',\n", + " 'him',\n", + " 'when',\n", + " 'he',\n", + " 'had\\nbeen',\n", + " 'but',\n", + " 'a',\n", + " 'day',\n", + " 'in',\n", + " 'their',\n", + " 'city.',\n", + " '{14}And\\nshe',\n", + " 'hailed',\n", + " 'him,',\n", + " 'saying:\\n\\nProphet',\n", + " 'of',\n", + " 'God,',\n", + " 'in',\n", + " 'quest',\n", + " 'of',\n", + " 'the\\nuttermost,',\n", + " 'long',\n", + " 'have',\n", + " 
'you',\n", + " 'searched',\n", + " 'the\\ndistances',\n", + " 'for',\n", + " 'your',\n", + " 'ship.\\n\\nAnd',\n", + " 'now',\n", + " 'your',\n", + " 'ship',\n", + " 'has',\n", + " 'come,',\n", + " 'and',\n", + " 'you',\n", + " 'must\\nneeds',\n", + " 'go.\\n\\nDeep',\n", + " 'is',\n", + " 'your',\n", + " 'longing',\n", + " 'for',\n", + " 'the',\n", + " 'land',\n", + " 'of\\nyour',\n", + " 'memories',\n", + " 'and',\n", + " 'the',\n", + " 'dwelling',\n", + " 'place\\nof',\n", + " 'your',\n", + " 'greater',\n", + " 'desires;',\n", + " 'and',\n", + " 'our',\n", + " 'love\\nwould',\n", + " 'not',\n", + " 'bind',\n", + " 'you',\n", + " 'nor',\n", + " 'our',\n", + " 'needs',\n", + " 'hold\\nyou.\\n\\nYet',\n", + " 'this',\n", + " 'we',\n", + " 'ask',\n", + " 'ere',\n", + " 'you',\n", + " 'leave',\n", + " 'us,',\n", + " 'that\\nyou',\n", + " 'speak',\n", + " 'to',\n", + " 'us',\n", + " 'and',\n", + " 'give',\n", + " 'us',\n", + " 'of',\n", + " 'your\\ntruth.\\n\\nAnd',\n", + " 'we',\n", + " 'will',\n", + " 'give',\n", + " 'it',\n", + " 'unto',\n", + " 'our',\n", + " 'children,\\nand',\n", + " 'they',\n", + " 'unto',\n", + " 'their',\n", + " 'children,',\n", + " 'and',\n", + " 'it\\nshall',\n", + " 'not',\n", + " 'perish.\\n\\nIn',\n", + " 'your',\n", + " 'aloneness',\n", + " 'you',\n", + " 'have',\n", + " 'watched',\n", + " 'with\\nour',\n", + " 'days,',\n", + " 'and',\n", + " 'in',\n", + " 'your',\n", + " 'wakefulness',\n", + " 'you\\nhave',\n", + " 'listened',\n", + " 'to',\n", + " 'the',\n", + " 'weeping',\n", + " 'and',\n", + " 'the\\nlaughter',\n", + " 'of',\n", + " 'our',\n", + " 'sleep.\\n\\nNow',\n", + " 'therefore',\n", + " 'disclose',\n", + " 'us',\n", + " 'to',\n", + " 'ourselves,\\nand',\n", + " 'tell',\n", + " 'us',\n", + " 'all',\n", + " 'that',\n", + " 'has',\n", + " 'been',\n", + " 'shown\\nyou',\n", + " 'of',\n", + " 'that',\n", + " 'which',\n", + " 'is',\n", + " 'between',\n", + " 'birth',\n", + " 'and\\ndeath.\\n\\n*****\\n\\nAnd',\n", + " 'he',\n", + 
" 'answered,\\n\\nPeople',\n", + " 'of',\n", + " 'Orphalese,',\n", + " ...]" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# your code here" + "prophet" ] }, { @@ -88,21 +1175,57 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 83, "metadata": {}, "outputs": [], "source": [ + "\n", "def reference(x):\n", - " '''\n", - " Input: A string\n", - " Output: The string with references removed\n", + " return re.split('{[0-9]+}', x)[0]\n", + "\n", + "\n", + " #'''\n", + " #Input: A string\n", + " #Output: The string with references removed\n", " \n", - " Example:\n", - " Input: 'the{7}'\n", - " Output: 'the'\n", - " '''\n", + " #Example:\n", + " #Input: 'the{7}'\n", + " #Output: 'the'\n", + " #'''\n", " \n", - " # your code here" + " # your code here\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['the', '7}']" + ] + }, + "execution_count": 85, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#prophet[1].split(\"{\")" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": {}, + "outputs": [], + "source": [ + "#def remove(character):\n", + "# return string.split(\"{\", \"\")\n", + " " ] }, { @@ -114,11 +1237,43 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 88, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "import re" + ] + }, + { + "cell_type": "code", + "execution_count": 90, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['PROPHET\\n\\n|Almustafa,',\n", + " 'the',\n", + " 'chosen',\n", + " 'and',\n", + " 'the\\nbeloved,',\n", + " 'who',\n", + " 'was',\n", + " 'a',\n", + " 'dawn',\n", + " 'unto']" + ] + }, + "execution_count": 90, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# your code here\n", + "\n", + "prophet_reference = 
list(map(reference, prophet))\n", + "prophet_reference[0:10]" ] }, { @@ -130,7 +1285,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 91, "metadata": {}, "outputs": [], "source": [ @@ -143,7 +1298,7 @@ " Input: 'the\\nbeloved'\n", " Output: ['the', 'beloved']\n", " '''\n", - " \n", + " return re.split('\\n', x)[0]\n", " # your code here" ] }, @@ -156,11 +1311,12 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 92, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "prophet_line = list(map(line_break, prophet_reference))" ] }, { @@ -172,11 +1328,25 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 93, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "['PROPHET', 'the', 'chosen', 'and', 'the', 'who', 'was', 'a', 'dawn', 'unto']" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# your code here" + "# your code here\n", + "\n", + "prophet_flat = prophet_line\n", + "prophet_flat[0:10]" ] }, { @@ -190,7 +1360,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 95, "metadata": {}, "outputs": [], "source": [ @@ -210,6 +1380,10 @@ " '''\n", " \n", " word_list = ['and', 'the', 'a', 'an']\n", + " if x in word_list:\n", + " return False\n", + " else:\n", + " return True\n", " \n", " # your code here" ] @@ -221,6 +1395,15 @@ "Use the `filter()` function to filter out the words speficied in the `word_filter()` function. Store the filtered list in the variable `prophet_filter`." 
] }, + { + "cell_type": "code", + "execution_count": 96, + "metadata": {}, + "outputs": [], + "source": [ + "prophet_filter = list(filter(word_filter, prophet_flat))" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -232,13 +1415,17 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 98, "metadata": {}, "outputs": [], "source": [ "def word_filter_case(x):\n", " \n", " word_list = ['and', 'the', 'a', 'an']\n", + " if x.lower() in word_list:\n", + " return False\n", + " else:\n", + " return True\n", " \n", " # your code here" ] }, @@ -256,7 +1443,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 99, "metadata": {}, "outputs": [], "source": [ "def concat_space(a, b):\n", " '''\n", " Input: Two strings\n", " Output: The two strings concatenated with a space between them\n", " \n", " Example:\n", " Input: 'John', 'Smith'\n", " Output: 'John Smith'\n", " '''\n", - " \n", + " return a + ' ' + b\n", " # your code here" ] }, { "cell_type": "markdown", "metadata": {}, @@ -282,11 +1469,12 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 100, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "prophet_filter = reduce(concat_space, prophet_filter)" ] }, { @@ -302,11 +1490,12 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 104, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "#df = pd.read_csv(\"Users/luisdemiguel/Downloads/PRSA_data_2010.1.1-2014.12.31.csv\")" ] }, { @@ -318,11 +1507,13 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 105, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here \n", + "# I can't find the csv\n", + "#df.head()" ] }, { @@ -334,21 +1525,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 106, "metadata": {}, "outputs": [], "source": [ - "def hourly(x):\n", - " '''\n", - " Input: A numerical value\n", - " Output: The value divided by 24\n", + "#def hourly(x):\n", + " #'''\n", + " #Input: A numerical value\n", + " #Output: The value divided by 24\n", " 
\n", - " Example:\n", - " Input: 48\n", - " Output: 2.0\n", - " '''\n", + " #Example:\n", + " #Input: 48\n", + " #Output: 2.0\n", + " #'''\n", " \n", - " # your code here" + " # your code here\n", + " #return x / 24" ] }, { @@ -360,11 +1552,14 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 107, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "\n", + "#m25_hourly = df[['Iws', 'Is', 'Ir']]\n", + "#pm25_hourly.apply(hourly)" ] }, { @@ -378,29 +1573,45 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 108, "metadata": {}, "outputs": [], "source": [ - "def sample_sd(x):\n", - " '''\n", - " Input: A Pandas series of values\n", - " Output: the standard deviation divided by the number of elements in the series\n", + "#def sample_sd(x):\n", + " #'''\n", + " #Input: A Pandas series of values\n", + " #Output: the standard deviation divided by the number of elements in the series\n", " \n", - " Example:\n", - " Input: pd.Series([1,2,3,4])\n", - " Output: 0.3726779962\n", - " '''\n", - " \n", + " #Example:\n", + " #Input: pd.Series([1,2,3,4])\n", + " #Output: 0.3726779962\n", + " #'''\n", + " #return np.std(x) / x.count() -1\n", " # your code here" ] + }, + { + "cell_type": "code", + "execution_count": 109, + "metadata": {}, + "outputs": [], + "source": [ + "#cols.apply(sample_sd)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python [conda env:ihm1_env]", "language": "python", - "name": "python3" + "name": "conda-env-ihm1_env-py" }, "language_info": { "codemirror_mode": { @@ -412,7 +1623,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.2" + "version": "3.7.3" } }, "nbformat": 4, diff --git a/module-3/deep-learning/your-code/challenge-1.ipynb 
b/module-3/deep-learning/your-code/challenge-1.ipynb index 2487c5fc..fb352659 100644 --- a/module-3/deep-learning/your-code/challenge-1.ipynb +++ b/module-3/deep-learning/your-code/challenge-1.ipynb @@ -34,13 +34,476 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "import pandas as pd\n", + "import numpy as np\n", + "import tensorflow as tf\n", + "from sklearn.model_selection import train_test_split\n", + "from sklearn.preprocessing import OneHotEncoder, OrdinalEncoder, StandardScaler, LabelEncoder\n", + "from tensorflow import keras\n", + "from tensorflow.keras import layers" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TLTMTRMLMMMRBLBMBRclass
0xxxxooxooTrue
1xxxxoooxoTrue
2xxxxooooxTrue
3xxxxooobbTrue
4xxxxoobobTrue
\n", + "
" + ], + "text/plain": [ + " TL TM TR ML MM MR BL BM BR class\n", + "0 x x x x o o x o o True\n", + "1 x x x x o o o x o True\n", + "2 x x x x o o o o x True\n", + "3 x x x x o o o b b True\n", + "4 x x x x o o b o b True" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# your code here\n", + "\n", + "df = pd.read_csv('tic-tac-toe.csv')\n", + "df.head()" + ] + }, + { + "cell_type": "code", + "execution_count": 5, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "encoder = LabelEncoder()\n", + "encoded = df.apply(encoder.fit_transform)\n", + "one_he_df = pd.DataFrame(encoded)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
TLTMTRMLMMMRBLBMBRclass
02222112111
12222111211
22222111121
32222111001
42222110101
.................................
9531222111220
9541212212120
9551212122120
9561211222120
9571122211220
\n", + "

958 rows × 10 columns

\n", + "
" + ], + "text/plain": [ + " TL TM TR ML MM MR BL BM BR class\n", + "0 2 2 2 2 1 1 2 1 1 1\n", + "1 2 2 2 2 1 1 1 2 1 1\n", + "2 2 2 2 2 1 1 1 1 2 1\n", + "3 2 2 2 2 1 1 1 0 0 1\n", + "4 2 2 2 2 1 1 0 1 0 1\n", + ".. .. .. .. .. .. .. .. .. .. ...\n", + "953 1 2 2 2 1 1 1 2 2 0\n", + "954 1 2 1 2 2 1 2 1 2 0\n", + "955 1 2 1 2 1 2 2 1 2 0\n", + "956 1 2 1 1 2 2 2 1 2 0\n", + "957 1 1 2 2 2 1 1 2 2 0\n", + "\n", + "[958 rows x 10 columns]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "one_he_df" ] }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Index(['TL', 'TM', 'TR', 'ML', 'MM', 'MR', 'BL', 'BM', 'BR', 'class'], dtype='object')" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "one_he_df.columns" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "0 1\n", + "1 1\n", + "2 1\n", + "3 1\n", + "4 1\n", + " ..\n", + "953 0\n", + "954 0\n", + "955 0\n", + "956 0\n", + "957 0\n", + "Name: class, Length: 958, dtype: int64" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "df_input = one_he_df[['TL', 'TM', 'TR', 'ML', 'MM', 'MR', 'BL', 'BM', 'BR']]\n", + "df_output = one_he_df['class']\n", + "df_output" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 1.00322257, 1.08495342, 1.00322257, ..., 1.00322257,\n", + " -0.16731812, -0.28682739],\n", + " [ 1.00322257, 1.08495342, 1.00322257, ..., -0.28682739,\n", + " 1.08495342, -0.28682739],\n", + " [ 1.00322257, 1.08495342, 1.00322257, ..., -0.28682739,\n", + " -0.16731812, 1.00322257],\n", + " ...,\n", + " [-0.28682739, 1.08495342, -0.28682739, ..., 1.00322257,\n", + " -0.16731812, 1.00322257],\n", + " 
[-0.28682739, 1.08495342, -0.28682739, ..., 1.00322257,\n", + " -0.16731812, 1.00322257],\n", + " [-0.28682739, -0.16731812, 1.00322257, ..., -0.28682739,\n", + " 1.08495342, 1.00322257]])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "scaler = StandardScaler()\n", + "df_input = scaler.fit_transform(df_input)\n", + "df_input" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "markdown", "metadata": {}, @@ -60,11 +523,127 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(718, 9)" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/plain": [ + "(718,)" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "# your code here\n", + "\n", + "df_input_train, df_input_test, df_output_train, df_output_test = train_test_split(df_input, df_output)\n", + "display(df_input_train.shape)\n", + "display(df_output_train.shape)" + ] + }, + { + "cell_type": "code", + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "model = tf.keras.Sequential()\n", + "model = tf.keras.Sequential([\n", + " tf.keras.layers.Flatten(),\n", + " tf.keras.layers.Dense(128,activation='relu'),\n", + " tf.keras.layers.Dense(128,activation='relu'),\n", + " tf.keras.layers.Dense(2,activation='softmax')\n", + "])\n", + "model.compile(optimizer='adam', \n", + " loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True), \n", + " metrics=['accuracy'])\n", + "df_input_train = pd.DataFrame(df_input_train)\n", + "df_output_train = pd.DataFrame(df_output_train)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + 
"metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/8\n", + "WARNING:tensorflow:Layer flatten is casting an input tensor from dtype float64 to the layer's dtype of float32, which is new behavior in TensorFlow 2. The layer has dtype float32 because its dtype defaults to floatx.\n", + "\n", + "If you intended to run this layer in float32, you can safely ignore this warning. If in doubt, this warning is likely only an issue if you are porting a TensorFlow 1.X model to TensorFlow 2.\n", + "\n", + "To change all layers to have dtype float64 by default, call `tf.keras.backend.set_floatx('float64')`. To change just this layer, pass dtype='float64' to the layer constructor. If you are the author of this layer, you can disable autocasting by passing autocast=False to the base Layer constructor.\n", + "\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.6458 - accuracy: 0.6281\n", + "Epoch 2/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.6023 - accuracy: 0.6811\n", + "Epoch 3/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.5754 - accuracy: 0.7368\n", + "Epoch 4/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.5527 - accuracy: 0.7646\n", + "Epoch 5/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.5337 - accuracy: 0.7883\n", + "Epoch 6/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.5161 - accuracy: 0.8120\n", + "Epoch 7/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.4997 - accuracy: 0.8315\n", + "Epoch 8/8\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.4844 - accuracy: 0.8579\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "model.fit(df_input_train, df_output_train.values, epochs=8)" + ] + }, + { + "cell_type": "code", + 
"execution_count": 14, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "WARNING:tensorflow:From /Users/luisdemiguel/miniconda3/envs/ihm1_env/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Model.state_updates (from tensorflow.python.keras.engine.training) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "WARNING:tensorflow:From /Users/luisdemiguel/miniconda3/envs/ihm1_env/lib/python3.7/site-packages/tensorflow/python/training/tracking/tracking.py:111: Layer.updates (from tensorflow.python.keras.engine.base_layer) is deprecated and will be removed in a future version.\n", + "Instructions for updating:\n", + "This property should not be used in TensorFlow 2.0, as updates are applied automatically.\n", + "INFO:tensorflow:Assets written to: tic-tac-toe.model/assets\n" + ] + } + ], + "source": [ + "model.save('tic-tac-toe.model')" ] }, { @@ -78,13 +657,41 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, "outputs": [], "source": [ - "# your code here" + "# your code here\n", + "new = tf.keras.models.load_model('tic-tac-toe.model')\n", + "predictions = new.predict(df_input_test)" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "8/8 [==============================] - 0s 1ms/step - loss: 0.5138 - accuracy: 0.7875\n", + "0.5138465166091919 0.7875000238418579\n" + ] + } + ], + "source": [ + "v_loss, v_acc = model.evaluate(df_input_test, df_output_test)\n", + "print(v_loss, v_acc)" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, { "cell_type": "markdown", "metadata": {}, @@ -104,11 +711,50 @@ }, { "cell_type": "code", - 
"execution_count": null, + "execution_count": 21, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Epoch 1/9\n", + "23/23 [==============================] - 0s 1ms/step - loss: 0.3883 - accuracy: 0.8579\n", + "Epoch 2/9\n", + "23/23 [==============================] - 0s 1ms/step - loss: 0.3273 - accuracy: 0.8816\n", + "Epoch 3/9\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.3008 - accuracy: 0.8928\n", + "Epoch 4/9\n", + "23/23 [==============================] - 0s 1ms/step - loss: 0.2764 - accuracy: 0.9067\n", + "Epoch 5/9\n", + "23/23 [==============================] - 0s 1ms/step - loss: 0.2501 - accuracy: 0.9081\n", + "Epoch 6/9\n", + "23/23 [==============================] - 0s 1ms/step - loss: 0.2259 - accuracy: 0.9192\n", + "Epoch 7/9\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.2239 - accuracy: 0.9262\n", + "Epoch 8/9\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.1941 - accuracy: 0.9290\n", + "Epoch 9/9\n", + "23/23 [==============================] - 0s 2ms/step - loss: 0.1602 - accuracy: 0.9582\n" + ] + }, + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "# your code here" + "# your code here\n", + "new_model = tf.keras.models.load_model('tic-tac-toe.model')\n", + "optimize = tf.keras.optimizers.Adam(learning_rate=0.0015)\n", + "new_model.compile(optimizer=optimize, loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n", + "new_model.fit(df_input_train, df_output_train, epochs=9)" ] }, { @@ -120,19 +766,27 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": {}, "outputs": [], "source": [ - "# your answer here" + "# your answer here\n", + "#increase the number of epochs" ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + 
"source": [] } ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python [conda env:ihm1_env]", "language": "python", - "name": "python3" + "name": "conda-env-ihm1_env-py" }, "language_info": { "codemirror_mode": { diff --git a/module-3/deep-learning/your-code/tic-tac-toe.model/saved_model.pb b/module-3/deep-learning/your-code/tic-tac-toe.model/saved_model.pb new file mode 100644 index 00000000..380b4ecc Binary files /dev/null and b/module-3/deep-learning/your-code/tic-tac-toe.model/saved_model.pb differ diff --git a/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.data-00000-of-00001 b/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.data-00000-of-00001 new file mode 100644 index 00000000..81555768 Binary files /dev/null and b/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.data-00000-of-00001 differ diff --git a/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.index b/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.index new file mode 100644 index 00000000..c9acb63d Binary files /dev/null and b/module-3/deep-learning/your-code/tic-tac-toe.model/variables/variables.index differ