Andy Sotheran 2019-04-28 17:51:20 +01:00
parent d964435d12
commit 0ed655e9ad
14 changed files with 6052 additions and 637 deletions

@@ -31,15 +31,15 @@
\newlabel{glossary}{{3}{4}{Glossary}{section.3}{}}
\abx@aux@cite{SaTdpsmm}
\abx@aux@segm{0}{0}{SaTdpsmm}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {4}Introduction}{9}{section.4}}
\newlabel{introduction}{{4}{9}{Introduction}{section.4}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {5}Problem Articulation}{11}{section.5}}
\newlabel{problem}{{5}{11}{Problem Articulation}{section.5}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Problem Statement}{11}{subsection.5.1}}
\newlabel{statement}{{5.1}{11}{Problem Statement}{subsection.5.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Stakeholders}{11}{subsection.5.2}}
\newlabel{stakeholders}{{5.2}{11}{Stakeholders}{subsection.5.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Project Motivation}{12}{subsection.5.3}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {4}Introduction}{10}{section.4}}
\newlabel{introduction}{{4}{10}{Introduction}{section.4}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {5}Problem Articulation}{12}{section.5}}
\newlabel{problem}{{5}{12}{Problem Articulation}{section.5}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.1}Problem Statement}{12}{subsection.5.1}}
\newlabel{statement}{{5.1}{12}{Problem Statement}{subsection.5.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.2}Stakeholders}{12}{subsection.5.2}}
\newlabel{stakeholders}{{5.2}{12}{Stakeholders}{subsection.5.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.3}Project Motivation}{13}{subsection.5.3}}
\abx@aux@cite{nlAeiBTCPSO}
\abx@aux@segm{0}{0}{nlAeiBTCPSO}
\abx@aux@cite{BTCFTsent}
@@ -47,26 +47,26 @@
\abx@aux@cite{BTCRNN}
\abx@aux@segm{0}{0}{BTCRNN}
\abx@aux@segm{0}{0}{SaTdpsmm}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Technical Specification}{14}{subsection.5.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {5.4}Technical Specification}{15}{subsection.5.4}}
\abx@aux@cite{ISO9000}
\abx@aux@segm{0}{0}{ISO9000}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {6}Quality Goals}{16}{section.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Process Description}{16}{subsection.6.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.2}Quality Objectives}{16}{subsection.6.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.3}Tools to Ensure Quality}{17}{subsection.6.3}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {6}Quality Goals}{17}{section.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.1}Process Description}{17}{subsection.6.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.2}Quality Objectives}{17}{subsection.6.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {6.3}Tools to Ensure Quality}{18}{subsection.6.3}}
\abx@aux@segm{0}{0}{BTCFTsent}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {7}Literature Review}{18}{section.7}}
\newlabel{literature}{{7}{18}{Literature Review}{section.7}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.1}Existing Tools}{18}{subsection.7.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.2}Related research}{18}{subsection.7.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {7}Literature Review}{19}{section.7}}
\newlabel{literature}{{7}{19}{Literature Review}{section.7}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.1}Existing Tools}{19}{subsection.7.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.2}Related research}{19}{subsection.7.2}}
\abx@aux@segm{0}{0}{SaTdpsmm}
\abx@aux@cite{StPNSentA}
\abx@aux@segm{0}{0}{StPNSentA}
\abx@aux@cite{MLBTCpred}
\abx@aux@segm{0}{0}{MLBTCpred}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.3}Data Collection}{19}{subsection.7.3}}
\newlabel{tweet_collection}{{7.3}{19}{Data Collection}{subsection.7.3}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.3.1}Twitter and Twitter API}{19}{subsubsection.7.3.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.3}Data Collection}{20}{subsection.7.3}}
\newlabel{tweet_collection}{{7.3}{20}{Data Collection}{subsection.7.3}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.3.1}Twitter and Twitter API}{20}{subsubsection.7.3.1}}
\abx@aux@cite{SearchTweets}
\abx@aux@segm{0}{0}{SearchTweets}
\abx@aux@cite{ConStream}
@@ -74,16 +74,16 @@
\abx@aux@cite{TweepyStream}
\abx@aux@segm{0}{0}{TweepyStream}
\abx@aux@segm{0}{0}{SearchTweets}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.3.2}Tweepy Python Package}{20}{subsubsection.7.3.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.3.2}Tweepy Python Package}{21}{subsubsection.7.3.2}}
\abx@aux@segm{0}{0}{SaTdpsmm}
\abx@aux@segm{0}{0}{BTCFTsent}
\abx@aux@segm{0}{0}{MLBTCpred}
\abx@aux@cite{PolClassPatients}
\abx@aux@segm{0}{0}{PolClassPatients}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.4}Sentiment Analysis}{21}{subsection.7.4}}
\newlabel{sentiment}{{7.4}{21}{Sentiment Analysis}{subsection.7.4}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.4.1}Natural Language Processing}{21}{subsubsection.7.4.1}}
\newlabel{algorithms}{{7.4.1}{21}{Natural Language Processing}{subsubsection.7.4.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.4}Sentiment Analysis}{22}{subsection.7.4}}
\newlabel{sentiment}{{7.4}{22}{Sentiment Analysis}{subsection.7.4}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.4.1}Natural Language Processing}{22}{subsubsection.7.4.1}}
\newlabel{algorithms}{{7.4.1}{22}{Natural Language Processing}{subsubsection.7.4.1}{}}
\abx@aux@cite{LexiconSocSent}
\abx@aux@segm{0}{0}{LexiconSocSent}
\abx@aux@cite{VADERPaper}
@@ -95,8 +95,8 @@
\abx@aux@segm{0}{0}{VADERPaper}
\abx@aux@segm{0}{0}{VADERPaper}
\abx@aux@segm{0}{0}{VADERPaper}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.4.2}Valence Aware Dictionary and sEntiment Reasoning}{22}{subsubsection.7.4.2}}
\newlabel{Vader}{{7.4.2}{22}{Valence Aware Dictionary and sEntiment Reasoning}{subsubsection.7.4.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.4.2}Valence Aware Dictionary and sEntiment Reasoning}{23}{subsubsection.7.4.2}}
\newlabel{Vader}{{7.4.2}{23}{Valence Aware Dictionary and sEntiment Reasoning}{subsubsection.7.4.2}{}}
\abx@aux@cite{NNDLBegin}
\abx@aux@segm{0}{0}{NNDLBegin}
\abx@aux@cite{WhatNN}
@@ -104,16 +104,16 @@
\abx@aux@cite{SupdictL}
\abx@aux@segm{0}{0}{SupdictL}
\abx@aux@segm{0}{0}{WhatNN}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.5}Neural Networks}{23}{subsection.7.5}}
\newlabel{networks}{{7.5}{23}{Neural Networks}{subsection.7.5}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.5}Neural Networks}{24}{subsection.7.5}}
\newlabel{networks}{{7.5}{24}{Neural Networks}{subsection.7.5}{}}
\abx@aux@cite{ErrorProp}
\abx@aux@segm{0}{0}{ErrorProp}
\abx@aux@cite{BeginLSTMRNN}
\abx@aux@segm{0}{0}{BeginLSTMRNN}
\abx@aux@cite{RNNLSTMtds}
\abx@aux@segm{0}{0}{RNNLSTMtds}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.1}Recurrent Neural Network (RNN)}{24}{subsubsection.7.5.1}}
\newlabel{types}{{7.5.1}{24}{Recurrent Neural Network (RNN)}{subsubsection.7.5.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.1}Recurrent Neural Network (RNN)}{25}{subsubsection.7.5.1}}
\newlabel{types}{{7.5.1}{25}{Recurrent Neural Network (RNN)}{subsubsection.7.5.1}{}}
\abx@aux@cite{NNEgrad}
\abx@aux@segm{0}{0}{NNEgrad}
\abx@aux@cite{RNNvanishGrad}
@@ -124,8 +124,8 @@
\abx@aux@cite{LSTMdia}
\abx@aux@segm{0}{0}{LSTMdia}
\abx@aux@segm{0}{0}{LSTMdia}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.2}Long-Short Term Memory (LSTM)}{25}{subsubsection.7.5.2}}
\newlabel{lstms}{{7.5.2}{25}{Long-Short Term Memory (LSTM)}{subsubsection.7.5.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.2}Long-Short Term Memory (LSTM)}{26}{subsubsection.7.5.2}}
\newlabel{lstms}{{7.5.2}{26}{Long-Short Term Memory (LSTM)}{subsubsection.7.5.2}{}}
\abx@aux@segm{0}{0}{LSTM}
\abx@aux@segm{0}{0}{LSTMdia}
\abx@aux@cite{LSTMmaths}
@@ -136,7 +136,7 @@
\abx@aux@segm{0}{0}{LSTMforetime}
\abx@aux@cite{TensorFlow}
\abx@aux@segm{0}{0}{TensorFlow}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.3}Keras and TensorFlow}{26}{subsubsection.7.5.3}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.3}Keras and TensorFlow}{27}{subsubsection.7.5.3}}
\abx@aux@cite{OptSGD}
\abx@aux@segm{0}{0}{OptSGD}
\abx@aux@cite{Optimisers}
@@ -145,7 +145,7 @@
\abx@aux@segm{0}{0}{Optimisers}
\abx@aux@cite{OptVariants}
\abx@aux@segm{0}{0}{OptVariants}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.4}Optimisers}{27}{subsubsection.7.5.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.4}Optimisers}{28}{subsubsection.7.5.4}}
\abx@aux@cite{OverOpt}
\abx@aux@segm{0}{0}{OverOpt}
\abx@aux@cite{Adam}
@@ -153,138 +153,150 @@
\abx@aux@segm{0}{0}{Adam}
\abx@aux@cite{RegularisationSc}
\abx@aux@segm{0}{0}{RegularisationSc}
\abx@aux@cite{dropoutKeras}
\abx@aux@segm{0}{0}{dropoutKeras}
\abx@aux@cite{dropoutM}
\abx@aux@segm{0}{0}{dropoutM}
\abx@aux@cite{StudyNBC}
\abx@aux@segm{0}{0}{StudyNBC}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.5}Regularisation}{29}{subsubsection.7.5.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.6}Machine Learning}{29}{subsection.7.6}}
\newlabel{machine}{{7.6}{29}{Machine Learning}{subsection.7.6}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.6.1}Naive Bayes}{29}{subsubsection.7.6.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.5}Regularisation}{30}{subsubsection.7.5.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.5.6}Dropout}{30}{subsubsection.7.5.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.6}Machine Learning}{30}{subsection.7.6}}
\newlabel{machine}{{7.6}{30}{Machine Learning}{subsection.7.6}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {7.6.1}Naive Bayes}{30}{subsubsection.7.6.1}}
\abx@aux@segm{0}{0}{StudyNBC}
\abx@aux@cite{TFIDFBOW}
\abx@aux@segm{0}{0}{TFIDFBOW}
\abx@aux@segm{0}{0}{TFIDFBOW}
\abx@aux@segm{0}{0}{TFIDFBOW}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.7}Bag Of Words}{30}{subsection.7.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.8}TF-IDF}{30}{subsection.7.8}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.7}Bag Of Words}{31}{subsection.7.7}}
\abx@aux@cite{SpamCScratch}
\abx@aux@segm{0}{0}{SpamCScratch}
\abx@aux@segm{0}{0}{SpamCScratch}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.8}TF-IDF}{32}{subsection.7.8}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.9}Additive Smoothing}{32}{subsection.7.9}}
\abx@aux@cite{RMSEMAE}
\abx@aux@segm{0}{0}{RMSEMAE}
\abx@aux@cite{MSE}
\abx@aux@segm{0}{0}{MSE}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.9}Additive Smoothing}{31}{subsection.7.9}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.10}Regression Performance Metrics}{31}{subsection.7.10}}
\abx@aux@segm{0}{0}{RMSEMAE}
\abx@aux@cite{MAPE}
\abx@aux@segm{0}{0}{MAPE}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {7.10}Regression Performance Metrics}{33}{subsection.7.10}}
\abx@aux@cite{TweepyDoc}
\abx@aux@segm{0}{0}{TweepyDoc}
\abx@aux@segm{0}{0}{SearchTweets}
\abx@aux@segm{0}{0}{ConStream}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {8}Solution Approach}{33}{section.8}}
\newlabel{solution}{{8}{33}{Solution Approach}{section.8}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.1}Data gathering}{33}{subsection.8.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.2}Data pre-processing}{34}{subsection.8.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.3}Spam Filtering}{34}{subsection.8.3}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {8}Solution Approach}{34}{section.8}}
\newlabel{solution}{{8}{34}{Solution Approach}{section.8}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.1}Data gathering}{34}{subsection.8.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.2}Data pre-processing}{35}{subsection.8.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.3}Spam Filtering}{35}{subsection.8.3}}
\abx@aux@segm{0}{0}{VADERPaper}
\abx@aux@segm{0}{0}{VADERPaper}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.4}Language Detection}{35}{subsection.8.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.5}Sentiment Analysis}{35}{subsection.8.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.4}Language Detection}{36}{subsection.8.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.5}Sentiment Analysis}{36}{subsection.8.5}}
\abx@aux@segm{0}{0}{VADERPaper}
\abx@aux@segm{0}{0}{LexiconSocSent}
\abx@aux@segm{0}{0}{LSTM}
\abx@aux@segm{0}{0}{StPNSentA}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.6}Neural Network}{36}{subsection.8.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.6}Neural Network}{37}{subsection.8.6}}
\abx@aux@segm{0}{0}{LSTMforetime}
\abx@aux@segm{0}{0}{LSTMforetime}
\abx@aux@cite{TFvsThe}
\abx@aux@segm{0}{0}{TFvsThe}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.7}Price Forecasting}{38}{subsection.8.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.8}Frontend Application}{38}{subsection.8.8}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.9}With reference to Initial PID}{38}{subsection.8.9}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.10}Solution Summary}{39}{subsection.8.10}}
\newlabel{summary}{{8.10}{39}{Solution Summary}{subsection.8.10}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.11}Data flow Overview}{40}{subsection.8.11}}
\newlabel{data-flow}{{8.11}{40}{Data flow Overview}{subsection.8.11}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {9}System Design}{41}{section.9}}
\newlabel{Design}{{9}{41}{System Design}{section.9}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {9.1}Dataflow Designs}{41}{subsection.9.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.7}Price Forecasting}{39}{subsection.8.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.8}Frontend Application}{39}{subsection.8.8}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.9}With reference to Initial PID}{39}{subsection.8.9}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.10}Solution Summary}{40}{subsection.8.10}}
\newlabel{summary}{{8.10}{40}{Solution Summary}{subsection.8.10}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {8.11}Initial Data flow Overview}{41}{subsection.8.11}}
\newlabel{data-flow}{{8.11}{41}{Initial Data flow Overview}{subsection.8.11}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {9}System Design}{42}{section.9}}
\newlabel{Design}{{9}{42}{System Design}{section.9}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {9.1}Dataflow Designs}{42}{subsection.9.1}}
\abx@aux@segm{0}{0}{VADERPaper}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {9.2}Interface Design}{48}{subsection.9.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {9.2}Interface Design}{49}{subsection.9.2}}
\abx@aux@cite{btcCharts}
\abx@aux@segm{0}{0}{btcCharts}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {10}Implementation}{50}{section.10}}
\newlabel{implementation}{{10}{50}{Implementation}{section.10}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.1}Data collection}{50}{subsection.10.1}}
\newlabel{collection}{{10.1}{50}{Data collection}{subsection.10.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.1}Price Time-Series Historical Data}{50}{subsubsection.10.1.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {1}Historical price collection and averaging per exchange}{50}{lstlisting.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.2}Price Time-Series Live Data}{51}{subsubsection.10.1.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {2}Extraction of Price from exchanges}{51}{lstlisting.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.3}Historical Tweet Collection}{52}{subsubsection.10.1.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {3}Sample Curl request - data saved to json and python script called to process data}{52}{lstlisting.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {4}Sift-text python script - used alongside Curl command in Listing 4}{52}{lstlisting.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.4}Live Tweet Collection}{54}{subsubsection.10.1.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {5}Tweepy Streamer setup}{54}{lstlisting.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.2}Data pre-processing}{56}{subsection.10.2}}
\newlabel{processing}{{10.2}{56}{Data pre-processing}{subsection.10.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.1}Tweet Filtering}{56}{subsubsection.10.2.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {6}Basic data filtering and processing function - defined in 'tweet\_collector.py'}{56}{lstlisting.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.2}Language detection filtering}{57}{subsubsection.10.2.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {7}Language detection and filter function \cite {langdectNLTK}}{57}{lstlisting.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {10}Implementation}{51}{section.10}}
\newlabel{implementation}{{10}{51}{Implementation}{section.10}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.1}Data collection}{51}{subsection.10.1}}
\newlabel{collection}{{10.1}{51}{Data collection}{subsection.10.1}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.1}Price Time-Series Historical Data}{51}{subsubsection.10.1.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {1}Historical price collection and averaging per exchange}{51}{lstlisting.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.2}Price Time-Series Live Data}{52}{subsubsection.10.1.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {2}Extraction of Price from exchanges}{52}{lstlisting.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.3}Historical Tweet Collection}{53}{subsubsection.10.1.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {3}Sample Curl request - data saved to json and python script called to process data}{53}{lstlisting.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {4}Sift-text python script - used alongside Curl command in Listing 4}{53}{lstlisting.4}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.1.4}Live Tweet Collection}{55}{subsubsection.10.1.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {5}Tweepy Streamer setup}{55}{lstlisting.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.2}Data pre-processing}{57}{subsection.10.2}}
\newlabel{processing}{{10.2}{57}{Data pre-processing}{subsection.10.2}{}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.1}Tweet Filtering}{57}{subsubsection.10.2.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {6}Basic data filtering and processing function - defined in 'tweet\_collector.py'}{57}{lstlisting.6}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.2}Language detection filtering}{58}{subsubsection.10.2.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {7}Language detection and filter function \cite {langdectNLTK}}{58}{lstlisting.7}}
\abx@aux@cite{langdectNLTK}
\abx@aux@segm{0}{0}{langdectNLTK}
\abx@aux@segm{0}{0}{langdectNLTK}
\abx@aux@cite{LanNgram}
\abx@aux@segm{0}{0}{LanNgram}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.3}Spam filter - Tokenisation, Ngrams, Stopword removal and Stemming}{59}{subsubsection.10.2.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {8}pre-processing of data prior to being used by the spam filter}{59}{lstlisting.8}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.2.3}Spam filter - Tokenisation, Ngrams, Stopword removal and Stemming}{60}{subsubsection.10.2.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {8}pre-processing of data prior to being used by the spam filter}{60}{lstlisting.8}}
\abx@aux@cite{StemvsLem}
\abx@aux@segm{0}{0}{StemvsLem}
\abx@aux@segm{0}{0}{StemvsLem}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.3}Spam Filtering}{60}{subsection.10.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {9}Spam filter training Class - \textit {tweet\_collector.py}}{60}{lstlisting.9}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.3}Spam Filtering}{61}{subsection.10.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {9}Spam filter training Class - \textit {tweet\_collector.py}}{61}{lstlisting.9}}
\abx@aux@cite{NValgor}
\abx@aux@segm{0}{0}{NValgor}
\abx@aux@segm{0}{0}{SpamCScratch}
\abx@aux@cite{SpamOrHamGit}
\abx@aux@segm{0}{0}{SpamOrHamGit}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.1}Naive Bayes model}{63}{subsubsection.10.3.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {10}classifier class of spam\_filter.py}{63}{lstlisting.10}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.2}Classification}{64}{subsubsection.10.3.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {11}Classify Function of Parent classifier class of spam\_filter.py}{64}{lstlisting.11}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.3}Predict}{65}{subsubsection.10.3.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {12}Predict function of parent classifier class of spam\_filter.py}{65}{lstlisting.12}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.4}Metrics}{65}{subsubsection.10.3.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {13}Metrics function for calculating the performance and accuracy of the model}{65}{lstlisting.13}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.4}Sentiment Analysis}{66}{subsection.10.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {14}VADER polarity classification}{66}{lstlisting.14}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.5}Recurrent Neural Network - LSTM}{67}{subsection.10.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.5.1}Dataset Creation}{67}{subsubsection.10.5.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {15}Dataset creation and preprocessing}{68}{lstlisting.15}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.5.2}Training and Testing Model}{69}{subsubsection.10.5.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {16}LSTM model creation\tmspace +\thinmuskip {.1667em} layering\tmspace +\thinmuskip {.1667em} compiling and fitting}{69}{lstlisting.16}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.6}Future Prediction Forecasting}{71}{subsection.10.6}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {17}Forecasting future price of next hour for Bitcoin}{71}{lstlisting.17}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.7}User Interface}{73}{subsection.10.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.7.1}Key Functions}{73}{subsubsection.10.7.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {18}AJAX request and plotting performance data to HTML table}{73}{lstlisting.18}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {19}Chart creation with AJAX request}{74}{lstlisting.19}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.7.2}Final Interface}{76}{subsubsection.10.7.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {11}Testing Metrics and Accuracy}{77}{section.11}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {11.1}Integration Testing}{77}{subsection.11.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {11.2}Accuracy of Model \& Results}{78}{subsection.11.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {11.2.1}Results Discussion}{78}{subsubsection.11.2.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {11.2.2}Execution Speeds}{80}{subsubsection.11.2.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {12}Project Evaluation}{81}{section.12}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {13}Discussion: Contribution and Reflection}{81}{section.13}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {13.1}Limitations}{81}{subsection.13.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {14}Conclusion and Future Improvements}{82}{section.14}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {14.1}Conclusion}{82}{subsection.14.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {14.2}Future Improvements}{82}{subsection.14.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {15}Appendices}{88}{section.15}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {15.1}Appendix A - Project Initiation Document}{88}{subsection.15.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.1}Naive Bayes model}{64}{subsubsection.10.3.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {10}classifier class of spam\_filter.py}{64}{lstlisting.10}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.2}Classification}{65}{subsubsection.10.3.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {11}Classify Function of Parent classifier class of spam\_filter.py}{65}{lstlisting.11}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.3}Predict}{66}{subsubsection.10.3.3}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {12}Predict function of parent classifier class of spam\_filter.py}{66}{lstlisting.12}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.3.4}Metrics}{66}{subsubsection.10.3.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {13}Metrics function for calculating the performance and accuracy of the model}{66}{lstlisting.13}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.4}Sentiment Analysis}{67}{subsection.10.4}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {14}VADER polarity classification}{67}{lstlisting.14}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.5}Recurrent Neural Network - LSTM}{68}{subsection.10.5}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.5.1}Dataset Creation}{68}{subsubsection.10.5.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {15}Dataset creation and preprocessing}{69}{lstlisting.15}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.5.2}Training and Testing Model}{70}{subsubsection.10.5.2}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {16}LSTM model creation\tmspace +\thinmuskip {.1667em} layering\tmspace +\thinmuskip {.1667em} compiling and fitting}{70}{lstlisting.16}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.6}Future Prediction Forecasting}{72}{subsection.10.6}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {17}Forecasting future price of next hour for Bitcoin}{72}{lstlisting.17}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {10.7}User Interface}{74}{subsection.10.7}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.7.1}Key Functions}{74}{subsubsection.10.7.1}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {18}AJAX request and plotting performance data to HTML table}{74}{lstlisting.18}}
\@writefile{lol}{\defcounter {refsection}{0}\relax }\@writefile{lol}{\contentsline {lstlisting}{\numberline {19}Chart creation with AJAX request}{75}{lstlisting.19}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {10.7.2}Final Interface}{77}{subsubsection.10.7.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {11}Testing Metrics and Accuracy}{79}{section.11}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {11.1}Integration Testing}{79}{subsection.11.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {11.2}Accuracy of Model \& Results}{80}{subsection.11.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {11.2.1}Results Discussion}{80}{subsubsection.11.2.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{\numberline {11.2.2}Execution Speeds}{83}{subsubsection.11.2.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {12}Discussion: Contribution and Reflection}{84}{section.12}}
\abx@aux@segm{0}{0}{RMSEMAE}
\abx@aux@cite{MBE}
\abx@aux@segm{0}{0}{MBE}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {12.1}Limitations}{85}{subsection.12.1}}
\abx@aux@segm{0}{0}{VADERPaper}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {12.2}Reflection}{86}{subsection.12.2}}
\abx@aux@cite{TwitterTerms}
\abx@aux@segm{0}{0}{TwitterTerms}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {13}Social, Legal and Ethical Issues}{88}{section.13}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {14}Conclusion and Future Improvements}{89}{section.14}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {14.1}Conclusion}{89}{subsection.14.1}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {14.2}Future Improvements}{89}{subsection.14.2}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{\numberline {15}Appendices}{95}{section.15}}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {15.1}Appendix A - Project Initiation Document}{95}{subsection.15.1}}
\abx@aux@refcontextdefaultsdone
\abx@aux@defaultrefcontext{0}{SaTdpsmm}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{nlAeiBTCPSO}{none/global//global/global}
@@ -319,6 +331,8 @@
\abx@aux@defaultrefcontext{0}{OverOpt}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{Adam}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{RegularisationSc}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{dropoutKeras}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{dropoutM}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{StudyNBC}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{TFIDFBOW}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{SpamCScratch}{none/global//global/global}
@@ -333,4 +347,6 @@
\abx@aux@defaultrefcontext{0}{StemvsLem}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{NValgor}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{SpamOrHamGit}{none/global//global/global}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {15.2}Appendix B - Log book}{101}{subsection.15.2}}
\abx@aux@defaultrefcontext{0}{MBE}{none/global//global/global}
\abx@aux@defaultrefcontext{0}{TwitterTerms}{none/global//global/global}
\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{\numberline {15.2}Appendix B - Log book}{108}{subsection.15.2}}

@@ -1122,6 +1122,65 @@
\verb https://missinglink.ai/guides/neural-network-concepts/neural-network-bias-bias-neuron-overfitting-underfitting/
\endverb
\endentry
\entry{dropoutKeras}{inproceedings}{}
\name{author}{1}{}{%
{{hash=223bf2a82b36647ecfc4d214f4d21449}{%
family={Team},
familyi={T\bibinitperiod},
given={Keras},
giveni={K\bibinitperiod}}}%
}
\list{organization}{1}{%
{Keras}%
}
\strng{namehash}{223bf2a82b36647ecfc4d214f4d21449}
\strng{fullhash}{223bf2a82b36647ecfc4d214f4d21449}
\strng{bibnamehash}{223bf2a82b36647ecfc4d214f4d21449}
\strng{authorbibnamehash}{223bf2a82b36647ecfc4d214f4d21449}
\strng{authornamehash}{223bf2a82b36647ecfc4d214f4d21449}
\strng{authorfullhash}{223bf2a82b36647ecfc4d214f4d21449}
\field{sortinit}{5}
\field{sortinithash}{3c19c3776b658b3558e9e2e4840c01e2}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{Dropout}
\verb{urlraw}
\verb https://keras.io/layers/core/#dropout
\endverb
\verb{url}
\verb https://keras.io/layers/core/#dropout
\endverb
\endentry
\entry{dropoutM}{inproceedings}{}
\name{author}{1}{}{%
{{hash=1d88603e0d71dfb04a604b7a2c8fdf1c}{%
family={Budhiraja},
familyi={B\bibinitperiod},
given={Amar},
giveni={A\bibinitperiod}}}%
}
\list{organization}{1}{%
{Medium}%
}
\strng{namehash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\strng{fullhash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\strng{bibnamehash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\strng{authorbibnamehash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\strng{authornamehash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\strng{authorfullhash}{1d88603e0d71dfb04a604b7a2c8fdf1c}
\field{sortinit}{5}
\field{sortinithash}{3c19c3776b658b3558e9e2e4840c01e2}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{Dropout in (Deep) Machine learning}
\field{year}{Dec 15, 2016}
\verb{urlraw}
\verb https://medium.com/@amarbudhiraja/https-medium-com-amarbudhiraja-learning-less-to-learn-better-dropout-in-deep-machine-learning-74334da4bfc5
\endverb
\verb{url}
\verb https://medium.com/@amarbudhiraja/https-medium-com-amarbudhiraja-learning-less-to-learn-better-dropout-in-deep-machine-learning-74334da4bfc5
\endverb
\endentry
\entry{StudyNBC}{inproceedings}{}
\true{moreauthor}
\true{morelabelname}
@@ -1171,8 +1230,8 @@
\strng{authorbibnamehash}{98de2b3b288cf4bd291e8937e5e28c49}
\strng{authornamehash}{98de2b3b288cf4bd291e8937e5e28c49}
\strng{authorfullhash}{98de2b3b288cf4bd291e8937e5e28c49}
\field{sortinit}{5}
\field{sortinithash}{3c19c3776b658b3558e9e2e4840c01e2}
\field{sortinit}{6}
\field{sortinithash}{57e57fb8451e7fcfa45d1e069f6d3136}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{booktitle}{A.I Wiki}
@@ -1347,8 +1406,8 @@
\strng{authorbibnamehash}{974f50284a1994b00c1f04d211402eb0}
\strng{authornamehash}{974f50284a1994b00c1f04d211402eb0}
\strng{authorfullhash}{974f50284a1994b00c1f04d211402eb0}
\field{sortinit}{7}
\field{sortinithash}{c818dd9105a2852444fc9f5e145c294e}
\field{sortinit}{8}
\field{sortinithash}{07edf88d4ea82509b9c4b4d13f41c452}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{Tensorflow Vs. Theano: What Do Researchers Prefer As An Artificial Intelligence Framework}
@@ -1519,8 +1578,8 @@
\strng{authorbibnamehash}{d757410543b5d3323e9db89d373ef2ac}
\strng{authornamehash}{d757410543b5d3323e9db89d373ef2ac}
\strng{authorfullhash}{d757410543b5d3323e9db89d373ef2ac}
\field{sortinit}{8}
\field{sortinithash}{07edf88d4ea82509b9c4b4d13f41c452}
\field{sortinit}{9}
\field{sortinithash}{1dd72ab054147731c9d824b49aba0534}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{Spam-or-Ham}
@@ -1532,6 +1591,62 @@
\verb https://github.com/tejank10/Spam-or-Ham
\endverb
\endentry
\entry{MBE}{inproceedings}{}
\name{author}{1}{}{%
{{hash=eb9806a56e1e80b69cc9a606b36cfe4b}{%
family={JJ},
familyi={J\bibinitperiod}}}%
}
\list{organization}{1}{%
{Medium}%
}
\strng{namehash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\strng{fullhash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\strng{bibnamehash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\strng{authorbibnamehash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\strng{authornamehash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\strng{authorfullhash}{eb9806a56e1e80b69cc9a606b36cfe4b}
\field{sortinit}{9}
\field{sortinithash}{1dd72ab054147731c9d824b49aba0534}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{MAE and RMSE Which Metric is Better}
\field{year}{Mar 23, 2016}
\verb{urlraw}
\verb https://medium.com/human-in-a-machine-world/mae-and-rmse-which-metric-is-better-e60ac3bde13d
\endverb
\verb{url}
\verb https://medium.com/human-in-a-machine-world/mae-and-rmse-which-metric-is-better-e60ac3bde13d
\endverb
\endentry
\entry{TwitterTerms}{inproceedings}{}
\name{author}{1}{}{%
{{hash=2491bc9c7d8731e1ae33124093bc7026}{%
family={Twitter},
familyi={T\bibinitperiod}}}%
}
\list{organization}{1}{%
{Twitter Corp.}%
}
\strng{namehash}{2491bc9c7d8731e1ae33124093bc7026}
\strng{fullhash}{2491bc9c7d8731e1ae33124093bc7026}
\strng{bibnamehash}{2491bc9c7d8731e1ae33124093bc7026}
\strng{authorbibnamehash}{2491bc9c7d8731e1ae33124093bc7026}
\strng{authornamehash}{2491bc9c7d8731e1ae33124093bc7026}
\strng{authorfullhash}{2491bc9c7d8731e1ae33124093bc7026}
\field{sortinit}{9}
\field{sortinithash}{1dd72ab054147731c9d824b49aba0534}
\field{labelnamesource}{author}
\field{labeltitlesource}{title}
\field{title}{Developer Agreement and Policy}
\field{year}{Effective: May 25, 2018}
\verb{urlraw}
\verb https://developer.twitter.com/en/developer-terms/agreement-and-policy.html
\endverb
\verb{url}
\verb https://developer.twitter.com/en/developer-terms/agreement-and-policy.html
\endverb
\endentry
\enddatalist
\endrefsection
\endinput

@@ -1979,39 +1979,45 @@
<bcf:citekey order="53">Adam</bcf:citekey>
<bcf:citekey order="54">Adam</bcf:citekey>
<bcf:citekey order="55">RegularisationSc</bcf:citekey>
<bcf:citekey order="56">StudyNBC</bcf:citekey>
<bcf:citekey order="57">StudyNBC</bcf:citekey>
<bcf:citekey order="58">TFIDFBOW</bcf:citekey>
<bcf:citekey order="59">TFIDFBOW</bcf:citekey>
<bcf:citekey order="56">dropoutKeras</bcf:citekey>
<bcf:citekey order="57">dropoutM</bcf:citekey>
<bcf:citekey order="58">StudyNBC</bcf:citekey>
<bcf:citekey order="59">StudyNBC</bcf:citekey>
<bcf:citekey order="60">TFIDFBOW</bcf:citekey>
<bcf:citekey order="61">SpamCScratch</bcf:citekey>
<bcf:citekey order="62">SpamCScratch</bcf:citekey>
<bcf:citekey order="63">RMSEMAE</bcf:citekey>
<bcf:citekey order="64">MSE</bcf:citekey>
<bcf:citekey order="61">TFIDFBOW</bcf:citekey>
<bcf:citekey order="62">TFIDFBOW</bcf:citekey>
<bcf:citekey order="63">SpamCScratch</bcf:citekey>
<bcf:citekey order="64">SpamCScratch</bcf:citekey>
<bcf:citekey order="65">RMSEMAE</bcf:citekey>
<bcf:citekey order="66">MAPE</bcf:citekey>
<bcf:citekey order="67">TweepyDoc</bcf:citekey>
<bcf:citekey order="68">SearchTweets</bcf:citekey>
<bcf:citekey order="69">ConStream</bcf:citekey>
<bcf:citekey order="70">VADERPaper</bcf:citekey>
<bcf:citekey order="71">VADERPaper</bcf:citekey>
<bcf:citekey order="66">MSE</bcf:citekey>
<bcf:citekey order="67">RMSEMAE</bcf:citekey>
<bcf:citekey order="68">MAPE</bcf:citekey>
<bcf:citekey order="69">TweepyDoc</bcf:citekey>
<bcf:citekey order="70">SearchTweets</bcf:citekey>
<bcf:citekey order="71">ConStream</bcf:citekey>
<bcf:citekey order="72">VADERPaper</bcf:citekey>
<bcf:citekey order="73">LexiconSocSent</bcf:citekey>
<bcf:citekey order="74">LSTM</bcf:citekey>
<bcf:citekey order="75">StPNSentA</bcf:citekey>
<bcf:citekey order="76">LSTMforetime</bcf:citekey>
<bcf:citekey order="77">LSTMforetime</bcf:citekey>
<bcf:citekey order="78">TFvsThe</bcf:citekey>
<bcf:citekey order="79">VADERPaper</bcf:citekey>
<bcf:citekey order="80">btcCharts</bcf:citekey>
<bcf:citekey order="81">langdectNLTK</bcf:citekey>
<bcf:citekey order="82">langdectNLTK</bcf:citekey>
<bcf:citekey order="83">LanNgram</bcf:citekey>
<bcf:citekey order="84">StemvsLem</bcf:citekey>
<bcf:citekey order="85">StemvsLem</bcf:citekey>
<bcf:citekey order="86">NValgor</bcf:citekey>
<bcf:citekey order="87">SpamCScratch</bcf:citekey>
<bcf:citekey order="88">SpamOrHamGit</bcf:citekey>
<bcf:citekey order="73">VADERPaper</bcf:citekey>
<bcf:citekey order="74">VADERPaper</bcf:citekey>
<bcf:citekey order="75">LexiconSocSent</bcf:citekey>
<bcf:citekey order="76">LSTM</bcf:citekey>
<bcf:citekey order="77">StPNSentA</bcf:citekey>
<bcf:citekey order="78">LSTMforetime</bcf:citekey>
<bcf:citekey order="79">LSTMforetime</bcf:citekey>
<bcf:citekey order="80">TFvsThe</bcf:citekey>
<bcf:citekey order="81">VADERPaper</bcf:citekey>
<bcf:citekey order="82">btcCharts</bcf:citekey>
<bcf:citekey order="83">langdectNLTK</bcf:citekey>
<bcf:citekey order="84">langdectNLTK</bcf:citekey>
<bcf:citekey order="85">LanNgram</bcf:citekey>
<bcf:citekey order="86">StemvsLem</bcf:citekey>
<bcf:citekey order="87">StemvsLem</bcf:citekey>
<bcf:citekey order="88">NValgor</bcf:citekey>
<bcf:citekey order="89">SpamCScratch</bcf:citekey>
<bcf:citekey order="90">SpamOrHamGit</bcf:citekey>
<bcf:citekey order="91">RMSEMAE</bcf:citekey>
<bcf:citekey order="92">MBE</bcf:citekey>
<bcf:citekey order="93">VADERPaper</bcf:citekey>
<bcf:citekey order="94">TwitterTerms</bcf:citekey>
<bcf:citekey order="0">*</bcf:citekey>
</bcf:section>
<!-- SORTING TEMPLATES -->

@@ -1,26 +1,29 @@
[0] Config.pm:302> INFO - This is Biber 2.9
[0] Config.pm:305> INFO - Logfile is 'document.blg'
[19] biber:313> INFO - === Sun Apr 28, 2019, 11:50:08
[34] Biber.pm:371> INFO - Reading 'document.bcf'
[82] Biber.pm:854> INFO - Using all citekeys in bib section 0
[92] Biber.pm:3981> INFO - Processing section 0
[100] Biber.pm:4154> INFO - Looking for bibtex format file 'report.bib' for section 0
[101] bibtex.pm:1468> INFO - LaTeX decoding ...
[140] bibtex.pm:1294> INFO - Found BibTeX data source 'report.bib'
[146] Utils.pm:169> WARN - year field 'Oct 16, 2018' in entry 'MSE' is not an integer - this will probably not sort properly.
[182] Utils.pm:169> WARN - year field 'Aug 2, 2017' in entry 'SpamOrHamGit' is not an integer - this will probably not sort properly.
[185] Utils.pm:169> WARN - year field 'Sep 8, 2017' in entry 'MAPE' is not an integer - this will probably not sort properly.
[199] Utils.pm:169> WARN - year field 'Aug 2, 2017' in entry 'SpamCScratch' is not an integer - this will probably not sort properly.
[206] Utils.pm:169> WARN - Name "Mairal, J., Ponce, J., Sapiro, G., Zisserman, A." has too many commas: skipping name
[237] Utils.pm:169> WARN - year field 'Feb 26, 2018' in entry 'StemvsLem' is not an integer - this will probably not sort properly.
[246] Utils.pm:169> WARN - year field 'Apr 7, 2018' in entry 'RMSEMAE' is not an integer - this will probably not sort properly.
[250] Utils.pm:169> WARN - year field 'Mar 13, 2016' in entry 'LSTMdia' is not an integer - this will probably not sort properly.
[280] Utils.pm:169> WARN - BibTeX subsystem: warning: comma(s) at end of name (removing)
[280] Utils.pm:169> WARN - BibTeX subsystem: author, warning: comma(s) at end of name (removing)
[304] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized'
[304] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable'
[304] Biber.pm:3809> INFO - Sorting list 'none/global//global/global' of type 'entry' with template 'none' and locale 'en-US'
[304] Biber.pm:3815> INFO - No sort tailoring available for locale 'en-US'
[326] bbl.pm:617> INFO - Writing 'document.bbl' with encoding 'ascii'
[342] bbl.pm:720> INFO - Output to document.bbl
[342] Biber.pm:110> INFO - WARNINGS: 10
[21] biber:313> INFO - === Sun Apr 28, 2019, 17:50:48
[37] Biber.pm:371> INFO - Reading 'document.bcf'
[89] Biber.pm:854> INFO - Using all citekeys in bib section 0
[99] Biber.pm:3981> INFO - Processing section 0
[107] Biber.pm:4154> INFO - Looking for bibtex format file 'report.bib' for section 0
[109] bibtex.pm:1468> INFO - LaTeX decoding ...
[154] bibtex.pm:1294> INFO - Found BibTeX data source 'report.bib'
[170] Utils.pm:169> WARN - year field 'Dec 15, 2016' in entry 'dropoutM' is not an integer - this will probably not sort properly.
[183] Utils.pm:169> WARN - year field 'Apr 7, 2018' in entry 'RMSEMAE' is not an integer - this will probably not sort properly.
[196] Utils.pm:169> WARN - year field 'Feb 26, 2018' in entry 'StemvsLem' is not an integer - this will probably not sort properly.
[210] Utils.pm:169> WARN - year field 'Effective: May 25, 2018' in entry 'TwitterTerms' is not an integer - this will probably not sort properly.
[212] Utils.pm:169> WARN - year field 'Aug 2, 2017' in entry 'SpamCScratch' is not an integer - this will probably not sort properly.
[222] Utils.pm:169> WARN - Name "Mairal, J., Ponce, J., Sapiro, G., Zisserman, A." has too many commas: skipping name
[240] Utils.pm:169> WARN - year field 'Mar 13, 2016' in entry 'LSTMdia' is not an integer - this will probably not sort properly.
[275] Utils.pm:169> WARN - year field 'Sep 8, 2017' in entry 'MAPE' is not an integer - this will probably not sort properly.
[277] Utils.pm:169> WARN - year field 'Aug 2, 2017' in entry 'SpamOrHamGit' is not an integer - this will probably not sort properly.
[282] Utils.pm:169> WARN - year field 'Oct 16, 2018' in entry 'MSE' is not an integer - this will probably not sort properly.
[297] Utils.pm:169> WARN - year field 'Mar 23, 2016' in entry 'MBE' is not an integer - this will probably not sort properly.
[320] Utils.pm:169> WARN - BibTeX subsystem: warning: comma(s) at end of name (removing)
[320] Utils.pm:169> WARN - BibTeX subsystem: author, warning: comma(s) at end of name (removing)
[348] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized'
[348] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable'
[348] Biber.pm:3809> INFO - Sorting list 'none/global//global/global' of type 'entry' with template 'none' and locale 'en-US'
[348] Biber.pm:3815> INFO - No sort tailoring available for locale 'en-US'
[375] bbl.pm:617> INFO - Writing 'document.bbl' with encoding 'ascii'
[393] bbl.pm:720> INFO - Output to document.bbl
[394] Biber.pm:110> INFO - WARNINGS: 13

@@ -1,4 +1,4 @@
This is pdfTeX, Version 3.14159265-2.6-1.40.18 (TeX Live 2017/Debian) (preloaded format=pdflatex 2018.10.16) 28 APR 2019 11:50
This is pdfTeX, Version 3.14159265-2.6-1.40.18 (TeX Live 2017/Debian) (preloaded format=pdflatex 2018.10.16) 28 APR 2019 17:50
entering extended mode
restricted \write18 enabled.
%&-line parsing enabled.
@@ -979,7 +979,7 @@ LaTeX Info: Redefining \nameref on input line 108.
\@outlinefile=\write5
\openout5 = `document.out'.
<images/reading_logo.png, id=349, 504.88625pt x 183.68625pt>
<images/reading_logo.png, id=357, 504.88625pt x 183.68625pt>
File: images/reading_logo.png Graphic file (type png)
<use images/reading_logo.png>
Package pdftex.def Info: images/reading_logo.png used on input line 111.
@@ -997,26 +997,21 @@ File: umsb.fd 2013/01/14 v3.01 AMS symbols B
{/var/lib/texmf/fonts/map/pdftex/updmap/pdftex.map} <./images/reading_logo.png>
] [2] [3] [4]
(./document.toc [5] [6] [7])
[5] (./document.toc [6] [7] [8])
\tf@toc=\write6
\openout6 = `document.toc'.
[8] [9] [10]
LaTeX Font Info: Try loading font information for OMS+cmr on input line 215.
[9] [10] [11]
LaTeX Font Info: Try loading font information for OMS+cmr on input line 225.
(/usr/share/texlive/texmf-dist/tex/latex/base/omscmr.fd
File: omscmr.fd 2014/09/29 v2.5h Standard LaTeX font definitions
)
LaTeX Font Info: Font shape `OMS/cmr/m/n' in size <12> not available
(Font) Font shape `OMS/cmsy/m/n' tried instead on input line 215.
[11] [12] [13]
Underfull \hbox (badness 10000) in paragraph at lines 231--233
[]
Underfull \hbox (badness 10000) in paragraph at lines 240--242
(Font) Font shape `OMS/cmsy/m/n' tried instead on input line 225.
[12] [13] [14]
Underfull \hbox (badness 10000) in paragraph at lines 241--243
[]
@ -1025,239 +1020,273 @@ Underfull \hbox (badness 10000) in paragraph at lines 250--252
[]
[14]
Underfull \hbox (badness 10000) in paragraph at lines 260--262
[]
Underfull \hbox (badness 10000) in paragraph at lines 269--271
[15]
Underfull \hbox (badness 10000) in paragraph at lines 270--272
[]
[15] [16] [17] [18] [19] [20] [21] [22]
<images/perceptron.png, id=587, 706.64pt x 392.46625pt>
Underfull \hbox (badness 10000) in paragraph at lines 279--281
[]
[16] [17] [18] [19] [20] [21] [22] [23]
<images/perceptron.png, id=601, 706.64pt x 392.46625pt>
File: images/perceptron.png Graphic file (type png)
<use images/perceptron.png>
Package pdftex.def Info: images/perceptron.png used on input line 377.
Package pdftex.def Info: images/perceptron.png used on input line 387.
(pdftex.def) Requested size: 284.52713pt x 170.72142pt.
[23 <./images/perceptron.png>]
<images/rnn_ffn.png, id=601, 844.15375pt x 342.27875pt>
[24 <./images/perceptron.png>]
<images/rnn_ffn.png, id=615, 844.15375pt x 342.27875pt>
File: images/rnn_ffn.png Graphic file (type png)
<use images/rnn_ffn.png>
Package pdftex.def Info: images/rnn_ffn.png used on input line 397.
Package pdftex.def Info: images/rnn_ffn.png used on input line 407.
(pdftex.def) Requested size: 426.80307pt x 170.72112pt.
[24 <./images/rnn_ffn.png>]
<images/lstm.png, id=614, 1160.335pt x 1029.8475pt>
[25 <./images/rnn_ffn.png>]
<images/lstm.png, id=630, 1160.335pt x 1029.8475pt>
File: images/lstm.png Graphic file (type png)
<use images/lstm.png>
Package pdftex.def Info: images/lstm.png used on input line 415.
Package pdftex.def Info: images/lstm.png used on input line 425.
(pdftex.def) Requested size: 256.07123pt x 199.1616pt.
[25 <./images/lstm.png>] [26] [27] [28] [29]
Underfull \hbox (badness 10000) in paragraph at lines 529--532
[26 <./images/lstm.png>] [27] [28] [29] [30]
Overfull \hbox (4.49005pt too wide) detected at line 532
\OML/cmm/m/it/12 Probability of Outcome \OMS/cmsy/m/n/12 \ \OML/cmm/m/it/12 E
vidence \OT1/cmr/m/n/12 = []
[]
Underfull \hbox (badness 10000) in paragraph at lines 542--545
[]
[30]
Underfull \hbox (badness 10000) in paragraph at lines 546--548
[31]
Underfull \hbox (badness 10000) in paragraph at lines 559--561
[]
Overfull \hbox (9.8438pt too wide) detected at line 551
Overfull \hbox (9.8438pt too wide) detected at line 564
\OML/cmm/m/it/12 P\OT1/cmr/m/n/12 (\OML/cmm/m/it/12 w\OMS/cmsy/m/n/12 j\OML/cmm
/m/it/12 spam\OT1/cmr/m/n/12 ) = []
[]
[31] [32]
Underfull \hbox (badness 10000) in paragraph at lines 568--570
[32] [33]
Underfull \hbox (badness 10000) in paragraph at lines 581--583
[]
Underfull \hbox (badness 10000) in paragraph at lines 572--574
Underfull \hbox (badness 10000) in paragraph at lines 585--587
[]
Underfull \hbox (badness 10000) in paragraph at lines 575--579
Underfull \hbox (badness 10000) in paragraph at lines 588--592
[]
Underfull \hbox (badness 10000) in paragraph at lines 580--582
Underfull \hbox (badness 10000) in paragraph at lines 593--595
[]
Underfull \hbox (badness 10000) in paragraph at lines 583--587
Underfull \hbox (badness 10000) in paragraph at lines 596--600
[]
[33] [34] [35] [36] [37] [38] [39]
<images/Generic_Flow.png, id=751, 1900.09875pt x 529.98pt>
[34] [35] [36] [37] [38] [39] [40]
<images/Generic_Flow.png, id=769, 1900.09875pt x 529.98pt>
File: images/Generic_Flow.png Graphic file (type png)
<use images/Generic_Flow.png>
Package pdftex.def Info: images/Generic_Flow.png used on input line 680.
Package pdftex.def Info: images/Generic_Flow.png used on input line 693.
(pdftex.def) Requested size: 512.13474pt x 227.62009pt.
Overfull \hbox (71.28728pt too wide) in paragraph at lines 680--681
Overfull \hbox (71.28728pt too wide) in paragraph at lines 693--694
[][]
[]
[40 <./images/Generic_Flow.png (PNG copy)>]
Underfull \hbox (badness 10000) in paragraph at lines 691--693
[41 <./images/Generic_Flow.png (PNG copy)>]
Underfull \hbox (badness 10000) in paragraph at lines 704--706
[]
<images/Dataflow.png, id=756, 3767.07375pt x 1526.70375pt>
<images/Dataflow.png, id=774, 3767.07375pt x 1526.70375pt>
File: images/Dataflow.png Graphic file (type png)
<use images/Dataflow.png>
Package pdftex.def Info: images/Dataflow.png used on input line 696.
Package pdftex.def Info: images/Dataflow.png used on input line 709.
(pdftex.def) Requested size: 512.09683pt x 227.62125pt.
Overfull \hbox (71.24937pt too wide) in paragraph at lines 696--698
Overfull \hbox (71.24937pt too wide) in paragraph at lines 709--711
[]
[]
[41 <./images/Dataflow.png (PNG copy)>]
<images/Data_Collector.png, id=761, 1152.305pt x 647.41875pt>
[42 <./images/Dataflow.png (PNG copy)>]
<images/Data_Collector.png, id=779, 1152.305pt x 647.41875pt>
File: images/Data_Collector.png Graphic file (type png)
<use images/Data_Collector.png>
Package pdftex.def Info: images/Data_Collector.png used on input line 704.
Package pdftex.def Info: images/Data_Collector.png used on input line 717.
(pdftex.def) Requested size: 426.78574pt x 227.61746pt.
[42 <./images/Data_Collector.png (PNG copy)>]
<images/Analysis_Engine.png, id=766, 1658.195pt x 719.68875pt>
[43 <./images/Data_Collector.png (PNG copy)>]
<images/Analysis_Engine.png, id=785, 1658.195pt x 719.68875pt>
File: images/Analysis_Engine.png Graphic file (type png)
<use images/Analysis_Engine.png>
Package pdftex.def Info: images/Analysis_Engine.png used on input line 719.
Package pdftex.def Info: images/Analysis_Engine.png used on input line 732.
(pdftex.def) Requested size: 483.67276pt x 227.62561pt.
Overfull \hbox (42.8253pt too wide) in paragraph at lines 719--721
Overfull \hbox (42.8253pt too wide) in paragraph at lines 732--734
[]
[]
[43 <./images/Analysis_Engine.png (PNG copy)>] [44]
<images/Neural_Network.png, id=777, 1502.61375pt x 1032.85875pt>
[44 <./images/Analysis_Engine.png (PNG copy)>] [45]
<images/Neural_Network.png, id=795, 1502.61375pt x 1032.85875pt>
File: images/Neural_Network.png Graphic file (type png)
<use images/Neural_Network.png>
Package pdftex.def Info: images/Neural_Network.png used on input line 737.
Package pdftex.def Info: images/Neural_Network.png used on input line 750.
(pdftex.def) Requested size: 483.6893pt x 341.42757pt.
Overfull \hbox (42.84184pt too wide) in paragraph at lines 737--739
Overfull \hbox (42.84184pt too wide) in paragraph at lines 750--752
[]
[]
[45 <./images/Neural_Network.png (PNG copy)>]
<images/Future_Predictions.png, id=782, 1596.96625pt x 490.83376pt>
[46 <./images/Neural_Network.png (PNG copy)>]
<images/Future_Predictions.png, id=800, 1596.96625pt x 490.83376pt>
File: images/Future_Predictions.png Graphic file (type png)
<use images/Future_Predictions.png>
Package pdftex.def Info: images/Future_Predictions.png used on input line 751.
Package pdftex.def Info: images/Future_Predictions.png used on input line 764.
(pdftex.def) Requested size: 512.1362pt x 227.62119pt.
Overfull \hbox (71.28874pt too wide) in paragraph at lines 751--753
Overfull \hbox (71.28874pt too wide) in paragraph at lines 764--766
[]
[]
[46 <./images/Future_Predictions.png (PNG copy)>]
<images/Frontend_Application.png, id=787, 804.00375pt x 599.23875pt>
[47 <./images/Future_Predictions.png (PNG copy)>]
<images/Frontend_Application.png, id=805, 804.00375pt x 599.23875pt>
File: images/Frontend_Application.png Graphic file (type png)
<use images/Frontend_Application.png>
Package pdftex.def Info: images/Frontend_Application.png used on input line 76
4.
Package pdftex.def Info: images/Frontend_Application.png used on input line 77
7.
(pdftex.def) Requested size: 284.52162pt x 256.07664pt.
[47 <./images/Frontend_Application.png (PNG copy)>]
<images/interface_design.png, id=792, 1086.0575pt x 1536.74126pt>
[48 <./images/Frontend_Application.png (PNG copy)>]
<images/interface_design.png, id=810, 1086.0575pt x 1536.74126pt>
File: images/interface_design.png Graphic file (type png)
<use images/interface_design.png>
Package pdftex.def Info: images/interface_design.png used on input line 780.
Package pdftex.def Info: images/interface_design.png used on input line 793.
(pdftex.def) Requested size: 227.61479pt x 369.88063pt.
[48 <./images/interface_design.png>] [49]
Underfull \hbox (badness 10000) in paragraph at lines 791--793
[49 <./images/interface_design.png>] [50]
Underfull \hbox (badness 10000) in paragraph at lines 804--806
[]
(/usr/share/texlive/texmf-dist/tex/latex/listings/lstlang1.sty
File: lstlang1.sty 2015/06/04 1.6 listings language file
) [50] [51]
[52] [53]
Underfull \hbox (badness 10000) in paragraph at lines 941--943
) [51] [52]
[53] [54]
Underfull \hbox (badness 10000) in paragraph at lines 954--956
[]
[54] [55] [56] [57] [58] [59] [60] [61] [62]
[55] [56] [57] [58] [59] [60] [61] [62] [63]
LaTeX Font Info: Font shape `OMS/cmr/m/n' in size <10> not available
(Font) Font shape `OMS/cmsy/m/n' tried instead on input line 1219.
(Font) Font shape `OMS/cmsy/m/n' tried instead on input line 1232.
[63] [64] [65] [66] [67] [68]
[69] [70] [71] [72] [73]
Underfull \hbox (badness 10000) in paragraph at lines 1551--1553
[64] [65] [66] [67] [68] [69]
[70] [71] [72] [73] [74]
Underfull \hbox (badness 10000) in paragraph at lines 1564--1566
[]
[74] [75]
<images/final_interface.png, id=1568, 1913.1475pt x 2695.06876pt>
File: images/final_interface.png Graphic file (type png)
<use images/final_interface.png>
Package pdftex.def Info: images/final_interface.png used on input line 1618.
(pdftex.def) Requested size: 398.32816pt x 540.60803pt.
[76 <./images/final_interface.png>] [77] [78] [79] [80] [81] [82]
LaTeX Font Info: Try loading font information for T1+cmtt on input line 1793
[75] [76]
LaTeX Font Info: Try loading font information for T1+cmtt on input line 1628
.
(/usr/share/texlive/texmf-dist/tex/latex/base/t1cmtt.fd
(/usr/share/texlive/texmf-dist/tex/latex/base/t1cmtt.fd
File: t1cmtt.fd 2014/09/29 v2.5h Standard LaTeX font definitions
)
Overfull \hbox (40.38213pt too wide) in paragraph at lines 1794--1794
<images/final_interface.png, id=1588, 1913.1475pt x 2695.06876pt>
File: images/final_interface.png Graphic file (type png)
<use images/final_interface.png>
Package pdftex.def Info: images/final_interface.png used on input line 1632.
(pdftex.def) Requested size: 398.32816pt x 540.60803pt.
[77]
[78 <./images/final_interface.png>] [79] [80]
<images/with_sentiment.png, id=1613, 1907.125pt x 518.93875pt>
File: images/with_sentiment.png Graphic file (type png)
<use images/with_sentiment.png>
Package pdftex.def Info: images/with_sentiment.png used on input line 1738.
(pdftex.def) Requested size: 455.24582pt x 170.71979pt.
Overfull \hbox (14.39836pt too wide) in paragraph at lines 1738--1739
[][]
[]
<images/without_sentiment.png, id=1615, 1913.1475pt x 496.85625pt>
File: images/without_sentiment.png Graphic file (type png)
<use images/without_sentiment.png>
Package pdftex.def Info: images/without_sentiment.png used on input line 1746.
(pdftex.def) Requested size: 455.22383pt x 170.7181pt.
Overfull \hbox (14.37637pt too wide) in paragraph at lines 1746--1747
[][]
[]
[81] [82 <./images/with_sentiment.png> <./images/without_sentiment.png>]
[83] [84] [85] [86] [87] [88] [89]
Overfull \hbox (40.38213pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 works,'' To-wards Data Sci-ence, 2018. [On-line]. Avail-able: []
$\T1/cmtt/m/n/12 https : / / towardsdatascience .
[]
[83]
Overfull \hbox (83.66737pt too wide) in paragraph at lines 1794--1794
[90]
Overfull \hbox (83.66737pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 works,'' Ma-chine Larn-ing Mas-tery, 2017. [On-line]. Avail-able
: []$\T1/cmtt/m/n/12 https : / / machinelearningmastery .
[]
Overfull \hbox (28.45175pt too wide) in paragraph at lines 1794--1794
Overfull \hbox (28.45175pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 lem,'' Su-per Data Sci-ence, 2018. [On-line]. Avail-able: []$\T1
/cmtt/m/n/12 https : / / www . superdatascience .
[]
[84]
Overfull \hbox (7.75049pt too wide) in paragraph at lines 1794--1794
[91]
Overfull \hbox (7.75049pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 2019. [On-line]. Avail-able: []$\T1/cmtt/m/n/12 https : / / medi
um . com / datadriveninvestor / overview -[]
[]
[85]
Overfull \hbox (7.25049pt too wide) in paragraph at lines 1794--1794
[92]
Overfull \hbox (7.25049pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 2017. [On-line]. Avail-able: []$\T1/cmtt/m/n/12 https : / / www
. statisticshowto . datasciencecentral .
[]
Overfull \hbox (9.24751pt too wide) in paragraph at lines 1794--1794
Overfull \hbox (9.24751pt too wide) in paragraph at lines 1867--1867
\T1/cmr/m/n/12 [On-line]. Avail-able: []$\T1/cmtt/m/n/12 http : / / blog . alej
andronolla . com / 2013 / 05 / 15 / detecting -[]
[]
Overfull \hbox (0.88026pt too wide) in paragraph at lines 1794--1794
Overfull \hbox (0.88026pt too wide) in paragraph at lines 1867--1867
[]\T1/cmr/m/n/12 P. Cryp-tog-ra-phy, ``A tu-to-rial on au-to-matic lan-guage id
en-ti-fi-ca-tion - ngram based,''
[]
[86] [87]
[93] [94]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1721, 597.55246pt x 845.07718pt>
<PID.pdf, id=1794, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf>
Package pdftex.def Info: PID.pdf used on input line 1799.
Package pdftex.def Info: PID.pdf used on input line 1872.
(pdftex.def) Requested size: 597.551pt x 845.07512pt.
@ -1265,7 +1294,7 @@ pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
File: PID.pdf Graphic file (type pdf)
<use PID.pdf>
Package pdftex.def Info: PID.pdf used on input line 1799.
Package pdftex.def Info: PID.pdf used on input line 1872.
(pdftex.def) Requested size: 597.551pt x 845.07512pt.
@ -1275,235 +1304,235 @@ rsion <1.7>, but at most version <1.5> allowed
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1724, page=1, 597.55246pt x 845.07718pt>
<PID.pdf, id=1797, page=1, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 1>
Package pdftex.def Info: PID.pdf , page1 used on input line 1799.
Package pdftex.def Info: PID.pdf , page1 used on input line 1872.
(pdftex.def) Requested size: 597.551pt x 845.07512pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 1>
Package pdftex.def Info: PID.pdf , page1 used on input line 1799.
Package pdftex.def Info: PID.pdf , page1 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[88]
[95]
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 1>
Package pdftex.def Info: PID.pdf , page1 used on input line 1799.
Package pdftex.def Info: PID.pdf , page1 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 1>
Package pdftex.def Info: PID.pdf , page1 used on input line 1799.
Package pdftex.def Info: PID.pdf , page1 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 1>
Package pdftex.def Info: PID.pdf , page1 used on input line 1799.
Package pdftex.def Info: PID.pdf , page1 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[89 <./PID.pdf>]
[96 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1755, page=2, 597.55246pt x 845.07718pt>
<PID.pdf, id=1827, page=2, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 2>
Package pdftex.def Info: PID.pdf , page2 used on input line 1799.
Package pdftex.def Info: PID.pdf , page2 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 2>
Package pdftex.def Info: PID.pdf , page2 used on input line 1799.
Package pdftex.def Info: PID.pdf , page2 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 2>
Package pdftex.def Info: PID.pdf , page2 used on input line 1799.
Package pdftex.def Info: PID.pdf , page2 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[90 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1761, page=3, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[91 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1776, page=4, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[92 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1782, page=5, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[93 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1788, page=6, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[94 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1794, page=7, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1799.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[95 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1800, page=8, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[96 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1809, page=9, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[97 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1820, page=10, 845.07718pt x 597.55246pt>
<PID.pdf, id=1834, page=3, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 3>
Package pdftex.def Info: PID.pdf , page3 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[98 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1832, page=11, 845.07718pt x 597.55246pt>
<PID.pdf, id=1848, page=4, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 4>
Package pdftex.def Info: PID.pdf , page4 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[99 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1838, page=12, 845.07718pt x 597.55246pt>
<PID.pdf, id=1854, page=5, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1799.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
<use PID.pdf, page 5>
Package pdftex.def Info: PID.pdf , page5 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[100 <./PID.pdf>]
Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1804.
[101]
Package atveryend Info: Empty hook `AfterLastShipout' on input line 1804.
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1860, page=6, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 6>
Package pdftex.def Info: PID.pdf , page6 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[101 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1866, page=7, 597.55246pt x 845.07718pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 7>
Package pdftex.def Info: PID.pdf , page7 used on input line 1872.
(pdftex.def) Requested size: 562.1644pt x 795.0303pt.
[102 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1872, page=8, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 8>
Package pdftex.def Info: PID.pdf , page8 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[103 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1882, page=9, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 9>
Package pdftex.def Info: PID.pdf , page9 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[104 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1893, page=10, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 10>
Package pdftex.def Info: PID.pdf , page10 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[105 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1905, page=11, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 11>
Package pdftex.def Info: PID.pdf , page11 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[106 <./PID.pdf>]
pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve
rsion <1.7>, but at most version <1.5> allowed
<PID.pdf, id=1911, page=12, 845.07718pt x 597.55246pt>
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
File: PID.pdf Graphic file (type pdf)
<use PID.pdf, page 12>
Package pdftex.def Info: PID.pdf , page12 used on input line 1872.
(pdftex.def) Requested size: 795.0303pt x 562.1644pt.
[107 <./PID.pdf>]
Package atveryend Info: Empty hook `BeforeClearDocument' on input line 1877.
[108]
Package atveryend Info: Empty hook `AfterLastShipout' on input line 1877.
(./document.aux)
Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1804.
Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1804.
Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 1877.
Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 1877.
Package rerunfilecheck Info: File `document.out' has not changed.
(rerunfilecheck) Checksum: D1AAB54CA0419CEB1471D36AF4D6DC10;13872.
(rerunfilecheck) Checksum: 1D7B2504DFF5D56ABCCDF1948D08498A;14207.
Package logreq Info: Writing requests to 'document.run.xml'.
\openout1 = `document.run.xml'.
)
Here is how much of TeX's memory you used:
25112 strings out of 492982
395463 string characters out of 6134895
25151 strings out of 492982
396355 string characters out of 6134895
1018656 words of memory out of 5000000
27443 multiletter control sequences out of 15000+600000
20231 words of font info for 58 fonts, out of 8000000 for 9000
27463 multiletter control sequences out of 15000+600000
21245 words of font info for 60 fonts, out of 8000000 for 9000
1141 hyphenation exceptions out of 8191
45i,18n,78p,2008b,1838s stack positions out of 5000i,500n,10000p,200000b,80000s
{/usr/share/texmf/fonts/enc/dvips/cm-super/cm-super-t1.enc}</usr/share/texliv
@ -1514,19 +1543,20 @@ type1/public/amsfonts/cm/cmr12.pfb></usr/share/texlive/texmf-dist/fonts/type1/p
ublic/amsfonts/cm/cmr8.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/am
sfonts/cm/cmsy10.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts
/cm/cmsy6.pfb></usr/share/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cms
y8.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbx1000.pfb></usr/share/t
exmf/fonts/type1/public/cm-super/sfbx1200.pfb></usr/share/texmf/fonts/type1/pub
lic/cm-super/sfbx1440.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbx172
8.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm0600.pfb></usr/share/te
xmf/fonts/type1/public/cm-super/sfrm1000.pfb></usr/share/texmf/fonts/type1/publ
ic/cm-super/sfrm1200.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm1440
.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm2488.pfb></usr/share/tex
mf/fonts/type1/public/cm-super/sfti1200.pfb></usr/share/texmf/fonts/type1/publi
c/cm-super/sftt1200.pfb>
Output written on document.pdf (101 pages, 2172582 bytes).
y8.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbi1200.pfb></usr/share/t
exmf/fonts/type1/public/cm-super/sfbx1000.pfb></usr/share/texmf/fonts/type1/pub
lic/cm-super/sfbx1200.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbx144
0.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfbx1728.pfb></usr/share/te
xmf/fonts/type1/public/cm-super/sfit1200.pfb></usr/share/texmf/fonts/type1/publ
ic/cm-super/sfrm0600.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm1000
.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfrm1200.pfb></usr/share/tex
mf/fonts/type1/public/cm-super/sfrm1440.pfb></usr/share/texmf/fonts/type1/publi
c/cm-super/sfrm2488.pfb></usr/share/texmf/fonts/type1/public/cm-super/sfti1200.
pfb></usr/share/texmf/fonts/type1/public/cm-super/sftt1200.pfb>
Output written on document.pdf (108 pages, 2423767 bytes).
PDF statistics:
2095 PDF objects out of 2487 (max. 8388607)
1913 compressed objects within 20 object streams
871 named destinations out of 1000 (max. 500000)
829 words of extra memory for PDF output out of 10000 (max. 10000000)
2175 PDF objects out of 2487 (max. 8388607)
1980 compressed objects within 20 object streams
886 named destinations out of 1000 (max. 500000)
855 words of extra memory for PDF output out of 10000 (max. 10000000)

View File

@ -26,61 +26,63 @@
\BOOKMARK [3][-]{subsubsection.7.5.3}{\376\377\000K\000e\000r\000a\000s\000\040\000a\000n\000d\000\040\000T\000e\000n\000s\000o\000r\000F\000l\000o\000w}{subsection.7.5}% 26
\BOOKMARK [3][-]{subsubsection.7.5.4}{\376\377\000O\000p\000t\000i\000m\000i\000s\000e\000r\000s}{subsection.7.5}% 27
\BOOKMARK [3][-]{subsubsection.7.5.5}{\376\377\000R\000e\000g\000u\000l\000a\000r\000i\000s\000a\000t\000i\000o\000n}{subsection.7.5}% 28
\BOOKMARK [2][-]{subsection.7.6}{\376\377\000M\000a\000c\000h\000i\000n\000e\000\040\000L\000e\000a\000r\000n\000i\000n\000g}{section.7}% 29
\BOOKMARK [3][-]{subsubsection.7.6.1}{\376\377\000N\000a\000i\000v\000e\000\040\000B\000a\000y\000e\000s}{subsection.7.6}% 30
\BOOKMARK [2][-]{subsection.7.7}{\376\377\000B\000a\000g\000\040\000O\000f\000\040\000W\000o\000r\000d\000s}{section.7}% 31
\BOOKMARK [2][-]{subsection.7.8}{\376\377\000T\000F\000-\000I\000D\000F}{section.7}% 32
\BOOKMARK [2][-]{subsection.7.9}{\376\377\000A\000d\000d\000i\000c\000t\000i\000v\000e\000\040\000S\000m\000o\000o\000t\000h\000i\000n\000g}{section.7}% 33
\BOOKMARK [2][-]{subsection.7.10}{\376\377\000R\000e\000g\000r\000e\000s\000s\000i\000o\000n\000\040\000P\000e\000r\000f\000o\000r\000m\000a\000n\000c\000e\000\040\000M\000e\000t\000r\000i\000c\000s}{section.7}% 34
\BOOKMARK [1][-]{section.8}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000A\000p\000p\000r\000o\000a\000c\000h}{}% 35
\BOOKMARK [2][-]{subsection.8.1}{\376\377\000D\000a\000t\000a\000\040\000g\000a\000t\000h\000e\000r\000i\000n\000g}{section.8}% 36
\BOOKMARK [2][-]{subsection.8.2}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000e\000-\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section.8}% 37
\BOOKMARK [2][-]{subsection.8.3}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section.8}% 38
\BOOKMARK [2][-]{subsection.8.4}{\376\377\000L\000a\000n\000g\000u\000a\000g\000e\000\040\000D\000e\000t\000e\000c\000t\000i\000o\000n}{section.8}% 39
\BOOKMARK [2][-]{subsection.8.5}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section.8}% 40
\BOOKMARK [2][-]{subsection.8.6}{\376\377\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k}{section.8}% 41
\BOOKMARK [2][-]{subsection.8.7}{\376\377\000P\000r\000i\000c\000e\000\040\000F\000o\000r\000e\000c\000a\000s\000t\000i\000n\000g}{section.8}% 42
\BOOKMARK [2][-]{subsection.8.8}{\376\377\000F\000r\000o\000n\000t\000e\000n\000d\000\040\000A\000p\000p\000l\000i\000c\000a\000t\000i\000o\000n}{section.8}% 43
\BOOKMARK [2][-]{subsection.8.9}{\376\377\000W\000i\000t\000h\000\040\000r\000e\000f\000e\000r\000e\000n\000c\000e\000\040\000t\000o\000\040\000I\000n\000i\000t\000i\000a\000l\000\040\000P\000I\000D}{section.8}% 44
\BOOKMARK [2][-]{subsection.8.10}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000S\000u\000m\000m\000a\000r\000y}{section.8}% 45
\BOOKMARK [2][-]{subsection.8.11}{\376\377\000D\000a\000t\000a\000\040\000f\000l\000o\000w\000\040\000O\000v\000e\000r\000v\000i\000e\000w}{section.8}% 46
\BOOKMARK [1][-]{section.9}{\376\377\000S\000y\000s\000t\000e\000m\000\040\000D\000e\000s\000i\000g\000n}{}% 47
\BOOKMARK [2][-]{subsection.9.1}{\376\377\000D\000a\000t\000a\000f\000l\000o\000w\000\040\000D\000e\000s\000i\000g\000n\000s}{section.9}% 48
\BOOKMARK [2][-]{subsection.9.2}{\376\377\000I\000n\000t\000e\000r\000f\000a\000c\000e\000\040\000D\000e\000s\000i\000g\000n}{section.9}% 49
\BOOKMARK [1][-]{section.10}{\376\377\000I\000m\000p\000l\000e\000m\000e\000n\000t\000a\000t\000i\000o\000n}{}% 50
\BOOKMARK [2][-]{subsection.10.1}{\376\377\000D\000a\000t\000a\000\040\000c\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section.10}% 51
\BOOKMARK [3][-]{subsubsection.10.1.1}{\376\377\000P\000r\000i\000c\000e\000\040\000T\000i\000m\000e\000-\000S\000e\000r\000i\000e\000s\000\040\000H\000i\000s\000t\000o\000r\000i\000c\000a\000l\000\040\000D\000a\000t\000a}{subsection.10.1}% 52
\BOOKMARK [3][-]{subsubsection.10.1.2}{\376\377\000P\000r\000i\000c\000e\000\040\000T\000i\000m\000e\000-\000S\000e\000r\000i\000e\000s\000\040\000L\000i\000v\000e\000\040\000D\000a\000t\000a}{subsection.10.1}% 53
\BOOKMARK [3][-]{subsubsection.10.1.3}{\376\377\000H\000i\000s\000t\000o\000r\000i\000c\000a\000l\000\040\000T\000w\000e\000e\000t\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{subsection.10.1}% 54
\BOOKMARK [3][-]{subsubsection.10.1.4}{\376\377\000L\000i\000v\000e\000\040\000T\000w\000e\000e\000t\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{subsection.10.1}% 55
\BOOKMARK [2][-]{subsection.10.2}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000e\000-\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section.10}% 56
\BOOKMARK [3][-]{subsubsection.10.2.1}{\376\377\000T\000w\000e\000e\000t\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{subsection.10.2}% 57
\BOOKMARK [3][-]{subsubsection.10.2.2}{\376\377\000L\000a\000n\000g\000u\000a\000g\000e\000\040\000d\000e\000t\000e\000c\000t\000i\000o\000n\000\040\000f\000i\000l\000t\000e\000r\000i\000n\000g}{subsection.10.2}% 58
\BOOKMARK [3][-]{subsubsection.10.2.3}{\376\377\000S\000p\000a\000m\000\040\000f\000i\000l\000t\000e\000r\000\040\000-\000\040\000T\000o\000k\000e\000n\000i\000s\000a\000t\000i\000o\000n\000,\000\040\000N\000g\000r\000a\000m\000s\000,\000\040\000S\000t\000o\000p\000w\000o\000r\000d\000\040\000r\000e\000m\000o\000v\000a\000l\000\040\000a\000n\000d\000\040\000S\000t\000e\000m\000m\000i\000n\000g}{subsection.10.2}% 59
\BOOKMARK [2][-]{subsection.10.3}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section.10}% 60
\BOOKMARK [3][-]{subsubsection.10.3.1}{\376\377\000N\000a\000i\000v\000e\000\040\000B\000a\000y\000e\000s\000\040\000m\000o\000d\000e\000l}{subsection.10.3}% 61
\BOOKMARK [3][-]{subsubsection.10.3.2}{\376\377\000C\000l\000a\000s\000s\000i\000f\000i\000c\000a\000t\000i\000o\000n}{subsection.10.3}% 62
\BOOKMARK [3][-]{subsubsection.10.3.3}{\376\377\000P\000r\000e\000d\000i\000c\000t}{subsection.10.3}% 63
\BOOKMARK [3][-]{subsubsection.10.3.4}{\376\377\000M\000e\000t\000r\000i\000c\000s}{subsection.10.3}% 64
\BOOKMARK [2][-]{subsection.10.4}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section.10}% 65
\BOOKMARK [2][-]{subsection.10.5}{\376\377\000R\000e\000c\000u\000r\000r\000e\000n\000t\000\040\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k\000\040\000-\000\040\000L\000S\000T\000M}{section.10}% 66
\BOOKMARK [3][-]{subsubsection.10.5.1}{\376\377\000D\000a\000t\000a\000s\000e\000t\000\040\000C\000r\000e\000a\000t\000i\000o\000n}{subsection.10.5}% 67
\BOOKMARK [3][-]{subsubsection.10.5.2}{\376\377\000T\000r\000a\000i\000n\000i\000n\000g\000\040\000a\000n\000d\000\040\000T\000e\000s\000t\000i\000n\000g\000\040\000M\000o\000d\000e\000l}{subsection.10.5}% 68
\BOOKMARK [2][-]{subsection.10.6}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000P\000r\000e\000d\000i\000c\000t\000i\000o\000n\000\040\000F\000o\000r\000e\000c\000a\000s\000t\000i\000n\000g}{section.10}% 69
\BOOKMARK [2][-]{subsection.10.7}{\376\377\000U\000s\000e\000r\000\040\000I\000n\000t\000e\000r\000f\000a\000c\000e}{section.10}% 70
\BOOKMARK [3][-]{subsubsection.10.7.1}{\376\377\000K\000e\000y\000\040\000F\000u\000n\000c\000t\000i\000o\000n\000s}{subsection.10.7}% 71
\BOOKMARK [3][-]{subsubsection.10.7.2}{\376\377\000F\000i\000n\000a\000l\000\040\000I\000n\000t\000e\000r\000f\000a\000c\000e}{subsection.10.7}% 72
\BOOKMARK [1][-]{section.11}{\376\377\000T\000e\000s\000t\000i\000n\000g\000\040\000M\000e\000t\000r\000i\000c\000s\000\040\000a\000n\000d\000\040\000A\000c\000c\000u\000r\000a\000c\000y}{}% 73
\BOOKMARK [2][-]{subsection.11.1}{\376\377\000I\000n\000t\000e\000g\000r\000a\000t\000i\000o\000n\000\040\000T\000e\000s\000t\000i\000n\000g}{section.11}% 74
\BOOKMARK [2][-]{subsection.11.2}{\376\377\000A\000c\000c\000u\000r\000a\000c\000y\000\040\000o\000f\000\040\000M\000o\000d\000e\000l\000\040\000\046\000\040\000R\000e\000s\000u\000l\000t\000s}{section.11}% 75
\BOOKMARK [3][-]{subsubsection.11.2.1}{\376\377\000R\000e\000s\000u\000l\000t\000s\000\040\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n}{subsection.11.2}% 76
\BOOKMARK [3][-]{subsubsection.11.2.2}{\376\377\000E\000x\000e\000c\000u\000t\000i\000o\000n\000\040\000S\000p\000e\000e\000d\000s}{subsection.11.2}% 77
\BOOKMARK [1][-]{section.12}{\376\377\000P\000r\000o\000j\000e\000c\000t\000\040\000E\000v\000a\000l\000u\000a\000t\000i\000o\000n}{}% 78
\BOOKMARK [1][-]{section.13}{\376\377\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n\000:\000\040\000C\000o\000n\000t\000r\000i\000b\000u\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 79
\BOOKMARK [2][-]{subsection.13.1}{\376\377\000L\000i\000m\000i\000t\000a\000t\000i\000o\000n\000s}{section.13}% 80
\BOOKMARK [1][-]{section.14}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n\000\040\000a\000n\000d\000\040\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{}% 81
\BOOKMARK [2][-]{subsection.14.1}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n}{section.14}% 82
\BOOKMARK [2][-]{subsection.14.2}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{section.14}% 83
\BOOKMARK [1][-]{section.15}{\376\377\000A\000p\000p\000e\000n\000d\000i\000c\000e\000s}{}% 84
\BOOKMARK [2][-]{subsection.15.1}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000A\000\040\000-\000\040\000P\000r\000o\000j\000e\000c\000t\000\040\000I\000n\000i\000t\000i\000a\000t\000i\000o\000n\000\040\000D\000o\000c\000u\000m\000e\000n\000t}{section.15}% 85
\BOOKMARK [2][-]{subsection.15.2}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000B\000\040\000-\000\040\000L\000o\000g\000\040\000b\000o\000o\000k}{section.15}% 86
\BOOKMARK [3][-]{subsubsection.7.5.6}{\376\377\000D\000r\000o\000p\000o\000u\000t}{subsection.7.5}% 29
\BOOKMARK [2][-]{subsection.7.6}{\376\377\000M\000a\000c\000h\000i\000n\000e\000\040\000L\000e\000a\000r\000n\000i\000n\000g}{section.7}% 30
\BOOKMARK [3][-]{subsubsection.7.6.1}{\376\377\000N\000a\000i\000v\000e\000\040\000B\000a\000y\000e\000s}{subsection.7.6}% 31
\BOOKMARK [2][-]{subsection.7.7}{\376\377\000B\000a\000g\000\040\000O\000f\000\040\000W\000o\000r\000d\000s}{section.7}% 32
\BOOKMARK [2][-]{subsection.7.8}{\376\377\000T\000F\000-\000I\000D\000F}{section.7}% 33
\BOOKMARK [2][-]{subsection.7.9}{\376\377\000A\000d\000d\000i\000c\000t\000i\000v\000e\000\040\000S\000m\000o\000o\000t\000h\000i\000n\000g}{section.7}% 34
\BOOKMARK [2][-]{subsection.7.10}{\376\377\000R\000e\000g\000r\000e\000s\000s\000i\000o\000n\000\040\000P\000e\000r\000f\000o\000r\000m\000a\000n\000c\000e\000\040\000M\000e\000t\000r\000i\000c\000s}{section.7}% 35
\BOOKMARK [1][-]{section.8}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000A\000p\000p\000r\000o\000a\000c\000h}{}% 36
\BOOKMARK [2][-]{subsection.8.1}{\376\377\000D\000a\000t\000a\000\040\000g\000a\000t\000h\000e\000r\000i\000n\000g}{section.8}% 37
\BOOKMARK [2][-]{subsection.8.2}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000e\000-\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section.8}% 38
\BOOKMARK [2][-]{subsection.8.3}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section.8}% 39
\BOOKMARK [2][-]{subsection.8.4}{\376\377\000L\000a\000n\000g\000u\000a\000g\000e\000\040\000D\000e\000t\000e\000c\000t\000i\000o\000n}{section.8}% 40
\BOOKMARK [2][-]{subsection.8.5}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section.8}% 41
\BOOKMARK [2][-]{subsection.8.6}{\376\377\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k}{section.8}% 42
\BOOKMARK [2][-]{subsection.8.7}{\376\377\000P\000r\000i\000c\000e\000\040\000F\000o\000r\000e\000c\000a\000s\000t\000i\000n\000g}{section.8}% 43
\BOOKMARK [2][-]{subsection.8.8}{\376\377\000F\000r\000o\000n\000t\000e\000n\000d\000\040\000A\000p\000p\000l\000i\000c\000a\000t\000i\000o\000n}{section.8}% 44
\BOOKMARK [2][-]{subsection.8.9}{\376\377\000W\000i\000t\000h\000\040\000r\000e\000f\000e\000r\000e\000n\000c\000e\000\040\000t\000o\000\040\000I\000n\000i\000t\000i\000a\000l\000\040\000P\000I\000D}{section.8}% 45
\BOOKMARK [2][-]{subsection.8.10}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000S\000u\000m\000m\000a\000r\000y}{section.8}% 46
\BOOKMARK [2][-]{subsection.8.11}{\376\377\000I\000n\000i\000t\000i\000a\000l\000\040\000D\000a\000t\000a\000\040\000f\000l\000o\000w\000\040\000O\000v\000e\000r\000v\000i\000e\000w}{section.8}% 47
\BOOKMARK [1][-]{section.9}{\376\377\000S\000y\000s\000t\000e\000m\000\040\000D\000e\000s\000i\000g\000n}{}% 48
\BOOKMARK [2][-]{subsection.9.1}{\376\377\000D\000a\000t\000a\000f\000l\000o\000w\000\040\000D\000e\000s\000i\000g\000n\000s}{section.9}% 49
\BOOKMARK [2][-]{subsection.9.2}{\376\377\000I\000n\000t\000e\000r\000f\000a\000c\000e\000\040\000D\000e\000s\000i\000g\000n}{section.9}% 50
\BOOKMARK [1][-]{section.10}{\376\377\000I\000m\000p\000l\000e\000m\000e\000n\000t\000a\000t\000i\000o\000n}{}% 51
\BOOKMARK [2][-]{subsection.10.1}{\376\377\000D\000a\000t\000a\000\040\000c\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section.10}% 52
\BOOKMARK [3][-]{subsubsection.10.1.1}{\376\377\000P\000r\000i\000c\000e\000\040\000T\000i\000m\000e\000-\000S\000e\000r\000i\000e\000s\000\040\000H\000i\000s\000t\000o\000r\000i\000c\000a\000l\000\040\000D\000a\000t\000a}{subsection.10.1}% 53
\BOOKMARK [3][-]{subsubsection.10.1.2}{\376\377\000P\000r\000i\000c\000e\000\040\000T\000i\000m\000e\000-\000S\000e\000r\000i\000e\000s\000\040\000L\000i\000v\000e\000\040\000D\000a\000t\000a}{subsection.10.1}% 54
\BOOKMARK [3][-]{subsubsection.10.1.3}{\376\377\000H\000i\000s\000t\000o\000r\000i\000c\000a\000l\000\040\000T\000w\000e\000e\000t\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{subsection.10.1}% 55
\BOOKMARK [3][-]{subsubsection.10.1.4}{\376\377\000L\000i\000v\000e\000\040\000T\000w\000e\000e\000t\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{subsection.10.1}% 56
\BOOKMARK [2][-]{subsection.10.2}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000e\000-\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section.10}% 57
\BOOKMARK [3][-]{subsubsection.10.2.1}{\376\377\000T\000w\000e\000e\000t\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{subsection.10.2}% 58
\BOOKMARK [3][-]{subsubsection.10.2.2}{\376\377\000L\000a\000n\000g\000u\000a\000g\000e\000\040\000d\000e\000t\000e\000c\000t\000i\000o\000n\000\040\000f\000i\000l\000t\000e\000r\000i\000n\000g}{subsection.10.2}% 59
\BOOKMARK [3][-]{subsubsection.10.2.3}{\376\377\000S\000p\000a\000m\000\040\000f\000i\000l\000t\000e\000r\000\040\000-\000\040\000T\000o\000k\000e\000n\000i\000s\000a\000t\000i\000o\000n\000,\000\040\000N\000g\000r\000a\000m\000s\000,\000\040\000S\000t\000o\000p\000w\000o\000r\000d\000\040\000r\000e\000m\000o\000v\000a\000l\000\040\000a\000n\000d\000\040\000S\000t\000e\000m\000m\000i\000n\000g}{subsection.10.2}% 60
\BOOKMARK [2][-]{subsection.10.3}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section.10}% 61
\BOOKMARK [3][-]{subsubsection.10.3.1}{\376\377\000N\000a\000i\000v\000e\000\040\000B\000a\000y\000e\000s\000\040\000m\000o\000d\000e\000l}{subsection.10.3}% 62
\BOOKMARK [3][-]{subsubsection.10.3.2}{\376\377\000C\000l\000a\000s\000s\000i\000f\000i\000c\000a\000t\000i\000o\000n}{subsection.10.3}% 63
\BOOKMARK [3][-]{subsubsection.10.3.3}{\376\377\000P\000r\000e\000d\000i\000c\000t}{subsection.10.3}% 64
\BOOKMARK [3][-]{subsubsection.10.3.4}{\376\377\000M\000e\000t\000r\000i\000c\000s}{subsection.10.3}% 65
\BOOKMARK [2][-]{subsection.10.4}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section.10}% 66
\BOOKMARK [2][-]{subsection.10.5}{\376\377\000R\000e\000c\000u\000r\000r\000e\000n\000t\000\040\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k\000\040\000-\000\040\000L\000S\000T\000M}{section.10}% 67
\BOOKMARK [3][-]{subsubsection.10.5.1}{\376\377\000D\000a\000t\000a\000s\000e\000t\000\040\000C\000r\000e\000a\000t\000i\000o\000n}{subsection.10.5}% 68
\BOOKMARK [3][-]{subsubsection.10.5.2}{\376\377\000T\000r\000a\000i\000n\000i\000n\000g\000\040\000a\000n\000d\000\040\000T\000e\000s\000t\000i\000n\000g\000\040\000M\000o\000d\000e\000l}{subsection.10.5}% 69
\BOOKMARK [2][-]{subsection.10.6}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000P\000r\000e\000d\000i\000c\000t\000i\000o\000n\000\040\000F\000o\000r\000e\000c\000a\000s\000t\000i\000n\000g}{section.10}% 70
\BOOKMARK [2][-]{subsection.10.7}{\376\377\000U\000s\000e\000r\000\040\000I\000n\000t\000e\000r\000f\000a\000c\000e}{section.10}% 71
\BOOKMARK [3][-]{subsubsection.10.7.1}{\376\377\000K\000e\000y\000\040\000F\000u\000n\000c\000t\000i\000o\000n\000s}{subsection.10.7}% 72
\BOOKMARK [3][-]{subsubsection.10.7.2}{\376\377\000F\000i\000n\000a\000l\000\040\000I\000n\000t\000e\000r\000f\000a\000c\000e}{subsection.10.7}% 73
\BOOKMARK [1][-]{section.11}{\376\377\000T\000e\000s\000t\000i\000n\000g\000\040\000M\000e\000t\000r\000i\000c\000s\000\040\000a\000n\000d\000\040\000A\000c\000c\000u\000r\000a\000c\000y}{}% 74
\BOOKMARK [2][-]{subsection.11.1}{\376\377\000I\000n\000t\000e\000g\000r\000a\000t\000i\000o\000n\000\040\000T\000e\000s\000t\000i\000n\000g}{section.11}% 75
\BOOKMARK [2][-]{subsection.11.2}{\376\377\000A\000c\000c\000u\000r\000a\000c\000y\000\040\000o\000f\000\040\000M\000o\000d\000e\000l\000\040\000\046\000\040\000R\000e\000s\000u\000l\000t\000s}{section.11}% 76
\BOOKMARK [3][-]{subsubsection.11.2.1}{\376\377\000R\000e\000s\000u\000l\000t\000s\000\040\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n}{subsection.11.2}% 77
\BOOKMARK [3][-]{subsubsection.11.2.2}{\376\377\000E\000x\000e\000c\000u\000t\000i\000o\000n\000\040\000S\000p\000e\000e\000d\000s}{subsection.11.2}% 78
\BOOKMARK [1][-]{section.12}{\376\377\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n\000:\000\040\000C\000o\000n\000t\000r\000i\000b\000u\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 79
\BOOKMARK [2][-]{subsection.12.1}{\376\377\000L\000i\000m\000i\000t\000a\000t\000i\000o\000n\000s}{section.12}% 80
\BOOKMARK [2][-]{subsection.12.2}{\376\377\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{section.12}% 81
\BOOKMARK [1][-]{section.13}{\376\377\000S\000o\000c\000i\000a\000l\000,\000\040\000L\000e\000g\000a\000l\000\040\000a\000n\000d\000\040\000E\000t\000h\000i\000c\000a\000l\000\040\000I\000s\000s\000u\000e\000s}{}% 82
\BOOKMARK [1][-]{section.14}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n\000\040\000a\000n\000d\000\040\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{}% 83
\BOOKMARK [2][-]{subsection.14.1}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n}{section.14}% 84
\BOOKMARK [2][-]{subsection.14.2}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{section.14}% 85
\BOOKMARK [1][-]{section.15}{\376\377\000A\000p\000p\000e\000n\000d\000i\000c\000e\000s}{}% 86
\BOOKMARK [2][-]{subsection.15.1}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000A\000\040\000-\000\040\000P\000r\000o\000j\000e\000c\000t\000\040\000I\000n\000i\000t\000i\000a\000t\000i\000o\000n\000\040\000D\000o\000c\000u\000m\000e\000n\000t}{section.15}% 87
\BOOKMARK [2][-]{subsection.15.2}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000B\000\040\000-\000\040\000L\000o\000g\000\040\000b\000o\000o\000k}{section.15}% 88

Binary file not shown.

Binary file not shown.

View File

@ -153,17 +153,27 @@
Doubling Down - To take further risk on a stock by doubling the effort/investment in the hope of raising the price
RNN - Recurrent Neural Network
RNN - Recurrent Neural Network - A type of neural network that maintains an internal memory of previous inputs, allowing it to learn from sequential data
LSTM - Long-Short Term Memory Neural Network
LSTM - Long Short-Term Memory Neural Network - A type of recurrent neural network that overcomes limitations of standard RNNs
RMSE - Root Mean Squared Error
RMSE - Root Mean Squared Error - Regression performance metric
MSE - Mean Squared Error
MSE - Mean Squared Error - Regression performance metric
MAE - Mean Absolute Error
MAE - Mean Absolute Error - Regression performance metric
MAPE - Mean Absolute Percentage Error
MAPE - Mean Absolute Percentage Error - Regression performance metric
HTML - Hyper-Text Markup Language - Markup language used for structuring web pages
CSS - Cascading Style Sheets - A style sheet language used to customise the design of the interface of a webpage coded in HTML
JSON - JavaScript Object Notation - Open-standard file format that uses human-readable text to transmit data objects
CSV - Comma-Separated Values - Tabular file format that uses commas to separate values
API - Application Programming Interface - A set of web endpoints, communication protocols and tools for building software
\newpage
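For reference, the regression metrics listed in the glossary above follow their standard definitions; the sketch below is not part of the commit (and assumes amsmath's align* environment is available), with $y_i$ the observed price, $\hat{y}_i$ the predicted price and $n$ the number of samples:
\begin{align*}
\mathrm{MSE}  &= \frac{1}{n}\sum_{i=1}^{n}\left(y_i - \hat{y}_i\right)^2 \\
\mathrm{RMSE} &= \sqrt{\frac{1}{n}\sum_{i=1}^{n}\left(y_i - \hat{y}_i\right)^2} \\
\mathrm{MAE}  &= \frac{1}{n}\sum_{i=1}^{n}\left|y_i - \hat{y}_i\right| \\
\mathrm{MAPE} &= \frac{100}{n}\sum_{i=1}^{n}\left|\frac{y_i - \hat{y}_i}{y_i}\right|
\end{align*}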
@ -200,7 +210,7 @@
\subsection{Problem Statement}\label{statement}
The fundamental problems this project attempts to address are that of, an open-source system available to the public that aids in the analysis and prediction of BTC. The accuracy of open-source tools and technology when applied to the trading market scene and to identify whether there is a correlation between Twitter sentiment and BTC price fluctuation. While there are existing tools, only a few are available to the public and only provide basic functionality, while others are kept in-house of major corporations who invest in this problem domain.
The fundamental problems this project attempts to address are: the lack of an open-source system, available to the public, that aids investor decision making on whether to invest at a given point, based on hourly analysis and prediction of BTC; the accuracy of open-source tools and technology when applied to the trading market; and whether there is a correlation between Twitter sentiment and BTC price fluctuation. While there are existing tools, only a few are available to the public and these provide only basic functionality, while others are kept in-house by major corporations that invest in this problem domain.
The other issue presented here is that assuming perfect accuracy can be achieved is naive. As this project will only use existing tools and technologies, there are limits to the accuracy that can be obtained. One of these is the suitability of the tools: there are no open-source sentiment analysers trained for stock market prediction, so finding a specifically trained analyser for the chosen domain is highly unlikely. Relatedly, finding the most suitable machine learning technique or neural network is equally important, as this will determine the accuracy of the predictions. As this is a regression problem, machine learning techniques and neural networks that focus on regression and forecasting should be considered.
@ -234,7 +244,7 @@
\textbf{General}:
\begin{itemize}
\item To investigate the use of a lexicon/dictionary-based sentiment analyser approach for sentiment analysis, and its customisability for a given topic domain
\item To create a system that can predict the next hour of Bitcoins price when given the price and sentiment for the past hour
\item To create a system that can predict the next hour of Bitcoin's price when given the price and sentiment for the past hour
\item To investigate natural language data pre-processing techniques and how these could be used to filter out unwanted data
\item To investigate the use of a neural network, specifically an LSTM, for forecasting price data
\item Ultimately, to investigate how the use of sentiment affects the prediction of price for the next hour
@ -358,7 +368,7 @@
\item Stopword removal: Removes commonly used words (such as "the", "in", "a") that provide no meaning to the sentiment of a given text
\item Stemming: Replaces words that carry common suffixes and prefixes with their reduced counterparts; for example, "go" and "goes" fundamentally convey the same meaning, so a stemmer reduces them to a common root
\item Term Probability Identification and Feature Extraction: A process that identifies the most frequently used words in a given text; using a probability-based approach on a pre-defined dataset, in which a range of texts is classified as overall negative or positive, a machine learning algorithm is trained to classify new texts accordingly.
\item Ngrams: Are a contiguous sequence of n items from a given sample of text. The use of Ngrams in natural language processing can improve the accuracy of classification. For example: Good and Not Good have opposite meanings. By only using 1 token (1gram) not good (not and good) can be incorrectly classified. As the english language contains a significant amount of 2gram type word chains using 2gram can improve the accuracy of classification.
\item Ngrams: A contiguous sequence of n items from a given sample of text. The use of Ngrams in natural language processing can improve the accuracy of classification. For example, 'Good' and 'Not Good' have opposite meanings; using only single tokens (1-grams), 'not good' ('not' and 'good') can be incorrectly classified. As the English language contains a significant number of 2-gram word chains, using 2-grams can improve the accuracy of classification. A short sketch of these pre-processing steps follows this list.
\end{itemize}
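As a minimal sketch of how these steps might be chained together with the NLTK package (the tokeniser, stopword list and stemmer chosen here are assumptions for illustration, and the relevant NLTK corpora are assumed to be downloaded):
\begin{lstlisting}[language=python, caption=Illustrative sketch of tokenisation\, stopword removal\, stemming and 2-grams]
# Sketch only: chaining tokenisation, stopword removal, stemming and 2-grams.
# Assumes the NLTK 'punkt' and 'stopwords' corpora have been downloaded.
import nltk
from nltk.corpus import stopwords
from nltk.stem import PorterStemmer
from nltk.util import ngrams

text = "Bitcoin is not good value this hour"
tokens = nltk.word_tokenize(text.lower())              # tokenisation
stop_words = set(stopwords.words('english'))
filtered = [t for t in tokens if t not in stop_words]  # stopword removal
stemmed = [PorterStemmer().stem(t) for t in filtered]  # stemming
bigrams = list(ngrams(tokens, 2))                      # 2-grams keep 'not good' together
\end{lstlisting}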
The former has been shown to provide higher accuracy than traditional machine learning approaches \cite{LexiconSocSent}, and needs little pre-processing of the data, as words have a pre-defined sentiment classification in a provided lexicon. Although these lexicons can be complex to create, they generally require few resources to use and alter.
@ -451,7 +461,7 @@
($G_t$ is the sum of the squares of the past gradients to $\theta$)
\end{center}
\item RMSProp (Root Mean Square Propagation): Aims to resolve Adagrads radically diminishing learning rates by using a moving average of the squared gradient. Thus utilises the magnitude of the recent gradient descent to normalise it, and gets adjusted automatically by choosing different learning rate for each parameter. \cite{OptVariants}
\item RMSProp (Root Mean Square Propagation): Aims to resolve Adagrad's radically diminishing learning rates by using a moving average of the squared gradients. It thus utilises the magnitude of recent gradients to normalise the update, and is adjusted automatically by choosing a different learning rate for each parameter. \cite{OptVariants}
\[\theta_{t+1} = \theta_t - \frac{\eta}{\sqrt{(1 - \gamma) g^2_{t-1} + \gamma g_t + \epsilon}} \cdot g_t\]
@ -459,7 +469,7 @@
($\gamma$ - decay that takes value from 0-1. $g_t$ - moving average of squared gradients)
\end{center} \cite{OverOpt}
\item Adam (Adaptive Moment Estimation): Also aims to resolve Adagrads diminishing learning rates, by calculates the adaptive learning rate for each parameter. Being one of the most popular gradient descent optimisation algorithms, it estimates from the 1st and 2nd moments of gradients. Adam implements the exponential moving average of the gradients to scale the learning rate of the network and keeps an average of past gradients. \cite{Adam}
\item Adam (Adaptive Moment Estimation): Also aims to resolve Adagrad's diminishing learning rates, by calculating an adaptive learning rate for each parameter. Being one of the most popular gradient descent optimisation algorithms, it estimates the learning rate from the 1st and 2nd moments of the gradients. Adam uses an exponential moving average of the gradients to scale the learning rate of the network and keeps an average of past gradients. \cite{Adam} (A short Keras configuration sketch follows this list.)
\[m_t = \beta_1 m_{t-1} + (1 - \beta_1) g_t\]
\[v_t = \beta_2 v_{t-1} + (1 - \beta_2) g^2_t\]
@ -482,7 +492,10 @@
\end{itemize}
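As a brief illustrative sketch (not the project's actual configuration), switching between these optimisers in Keras is typically a one-line change when compiling a model; the learning rates, loss function and the trivial model below are assumptions for illustration:
\begin{lstlisting}[language=python, caption=Illustrative sketch of selecting an optimiser in Keras]
# Sketch only: any of the optimisers discussed above can be passed to compile().
# The model, learning rates and loss function here are illustrative assumptions.
from keras.models import Sequential
from keras.layers import Dense
from keras.optimizers import SGD, RMSprop, Adam

model = Sequential()
model.add(Dense(1, input_shape=(2,)))  # trivial model purely for illustration

model.compile(optimizer=RMSprop(lr=0.001), loss='mean_squared_error')
# model.compile(optimizer=Adam(lr=0.001), loss='mean_squared_error')
# model.compile(optimizer=SGD(lr=0.01), loss='mean_squared_error')
\end{lstlisting}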
\subsubsection{Regularisation}
To avoid issues such as overfitting of a neural networks model, techniques such as regularisation to produce better predictive performance and to improve variance of the model created. \cite{RegularisationSc}
To avoid issues such as overfitting of the model of a neural network, techniques such as regularisation are used to produce better predictive performance and to reduce the variance of the model created. \cite{RegularisationSc} Regularisation is a technique that involves modifying the error function of the network, calculated as the sum of squared errors over the individual training and validation samples. It adds a term to the error function which decreases the weights and biases of the network, smoothing the outputs of each layer and LSTM cell and thus making the network less likely to overfit.
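As one concrete sketch of this idea (the text does not specify which regulariser is meant; an L2 weight-decay penalty is shown here as an example), a term weighted by $\lambda$ is added to the sum-of-squares error:
\[E = \sum_{i}\left(y_i - \hat{y}_i\right)^2 + \lambda \sum_{j} w_j^2\]
where $y_i$ and $\hat{y}_i$ are the target and predicted values, $w_j$ are the network weights and $\lambda$ controls how strongly large weights are penalised.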
\subsubsection{Dropout}
Dropout is a method of reducing under- and overfitting of a network by ignoring neurons during the training phase of model creation, where a particular set of neurons is chosen at random to be ignored. Because the connected layers in a neural network, and especially in an LSTM network, occupy the majority of the parameters, neurons can develop a co-dependency on each other during the training phase, which reduces the individual efficiency of each neuron and can lead to overfitting of the training data. \cite{dropoutKeras}\cite{dropoutM}
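A minimal Keras sketch of placing dropout between recurrent layers is shown below; the cell counts, dropout rate and input shape are assumptions for illustration, and the project's own model construction appears in the Implementation section:
\begin{lstlisting}[language=python, caption=Illustrative sketch of dropout between LSTM layers]
# Sketch only: a Dropout layer after each LSTM layer randomly ignores a
# fraction of that layer's outputs during training, reducing co-dependency.
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense

model = Sequential()
model.add(LSTM(50, return_sequences=True, input_shape=(24, 2)))  # 24 timesteps, 2 features (assumed)
model.add(Dropout(0.2))  # ignore 20% of outputs at training time
model.add(LSTM(50))
model.add(Dropout(0.2))
model.add(Dense(1))
\end{lstlisting}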
\subsection{Machine Learning}\label{machine}
\subsubsection{Naive Bayes}
@ -516,7 +529,7 @@
\[P(H\cap A_1 ... A_n) = \frac{P(A_1\cap H) * P(A_2\cap H) ... * P(A_n\cap H) * P(H)}{P(A_1) * P(A_2) ... * P(A_n)} \]
\[Probability \ of \ Outcome \cap Evidence = \frac{Probability \ of \ Likelihood \ of \ evidence * Prior}{Probability \ of \ Evidence} \]
\[Probability \ of \ Outcome\ \cap \ Evidence = \frac{Probability \ of \ Likelihood \ of \ evidence * Prior}{Probability \ of \ Evidence} \]
The naive Bayes approach has many applications, including, for the topic of this project, classifying the probability of occurrence of the next price. Although it is a robust algorithm, it has drawbacks which make it less suitable than a neural network for the needs of this project. The naive Bayes trap is an issue that may occur due to the size of the dataset that will be used. There are, however, other scenarios where this algorithm could be used, such as the classification of spam data.\cite{StudyNBC}
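As a hedged sketch of the spam-classification use just mentioned, scikit-learn's multinomial naive Bayes can be combined with a bag-of-words count vectoriser; the tiny training set below is purely illustrative:
\begin{lstlisting}[language=python, caption=Illustrative sketch of multinomial naive Bayes spam classification]
# Sketch only: multinomial naive Bayes for spam vs ham classification.
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import MultinomialNB

train_texts = ["free bitcoin giveaway click now",       # spam (illustrative)
               "btc price looks stable this hour",      # ham
               "win free coins follow and retweet",     # spam
               "market sentiment is slightly positive"] # ham
train_labels = ["spam", "ham", "spam", "ham"]

vectoriser = CountVectorizer()          # bag-of-words term counts
X_train = vectoriser.fit_transform(train_texts)
classifier = MultinomialNB(alpha=1.0)   # alpha is additive (Laplace) smoothing
classifier.fit(X_train, train_labels)

X_new = vectoriser.transform(["free btc click here"])
print(classifier.predict(X_new))        # expected: ['spam']
\end{lstlisting}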
@ -674,8 +687,8 @@
\newpage
\subsection{Data flow Overview}\label{data-flow}
To get an understanding of how the system will be put together, a dataflow diagram is a useful method for view how systems are integrated and how data could possibly flow through a system.
\subsection{Initial Data flow Overview}\label{data-flow}
To get an understanding of how the system will be put together, a dataflow diagram is a useful method for viewing how systems are integrated and how data could flow through a system. \textit{Figure 4} shows the initial idea of how the system will be constructed and how data will flow and possibly be processed at each stage of the system; this will be expanded and built upon in the system design section.
\includegraphics[width=18cm,height=8cm]{images/Generic_Flow.png}
\begin{center}
@ -711,7 +724,7 @@
\subitem Live data is extracted directly from the three exchanges' APIs shown, through REST endpoint requests.
\subitem Data from both, as separate processes independent of one another, are averaged by extracting the \textit{High}, \textit{Mid} and \textit{Low} hourly prices. These averaged prices per hour for each exchange are then averaged together to obtain an unbiased hourly average. The price is then saved to a CSV of historical or live prices respectively. The difference in the flow of data is for live prices, where the process is looped every hour to extract the new hourly prices.
\item Tweet Collector - Streams tweets from Twitter using Tweepy; historical tweets are manually collected directly from the Twitter API. Both are fed through the normalisation and data pre-processing stage.
\item Data pre-processing - This involves cleaning the initial data by removing line breaks and new lines that occur in the data, removal of special characters that are standard in tweets (\textit{'\#','\@' and urls}). The data is then fed into a language detection system which tokenises and compares stopwords in the text to NLTK package supported languages. Depending on whether the text is identified as being predominately English or not determines whether or not the tweet is dropped and not used in the network. If the majority is in English, non-English characters are removed as these can still be present in the text.
\item Data pre-processing - This involves cleaning the initial data by removing line breaks and newlines that occur in the data, and removing special characters that are standard in tweets (\textit{'\#', '@' and urls}). The data is then fed into a language detection system which tokenises the text and compares its stopwords to the languages supported by the NLTK package. Whether the text is identified as predominantly English determines whether the tweet is kept or dropped and not used in the network. If the majority is English, non-English characters are removed, as these can still be present in the text. A short sketch of this step follows the list.
\end{itemize}
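A minimal sketch of this cleaning and language-detection step follows; the exact regular expressions and the use of a highest-stopword-overlap rule are assumptions for illustration:
\begin{lstlisting}[language=python, caption=Illustrative sketch of tweet cleaning and language detection]
# Sketch only: strip tweet-specific characters, then keep the tweet if English
# has the largest overlap with NLTK stopword lists across supported languages.
import re
from nltk.corpus import stopwords
from nltk.tokenize import wordpunct_tokenize

def clean_tweet(text):
    text = text.replace('\n', ' ').replace('\r', ' ')  # remove line breaks
    text = re.sub(r'http\S+|www\.\S+', '', text)       # remove urls
    text = re.sub(r'[#@]', '', text)                   # remove '#' and '@'
    return text.strip()

def is_mostly_english(text):
    tokens = {t.lower() for t in wordpunct_tokenize(text)}
    scores = {lang: len(tokens & set(stopwords.words(lang)))
              for lang in stopwords.fileids()}
    return max(scores, key=scores.get) == 'english'

tweet = clean_tweet("#Bitcoin is on the up this hour! https://example.com @someone")
if is_mostly_english(tweet):
    tweet = tweet.encode('ascii', errors='ignore').decode()  # drop non-English characters
\end{lstlisting}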
\textbf{Analysis Engine}
@ -788,7 +801,7 @@
\begin{center}
\section{Implementation}\label{implementation}
\end{center}
This section will outline the method and process of development of the system to satisfy the chosen solution, technical specification and the problem statement. Each section of the system will be outlined and discussed with relevant codes snippets of essential methods from the system to highlight the processing of data throughout. Additionally, the order in which the following sections are show was not the order in which they were developed, the order in which they are shown is to represent the order of how the data flows through the system, see \textit{section 9 - Ssytem Design} for an understanding of the flow of data through the system.
This section will outline the method and process of development of the system to satisfy the chosen solution, the technical specification and the problem statement. Each part of the system will be outlined and discussed with relevant code snippets of essential methods from the system to highlight the processing of data throughout. Additionally, the order in which the following sections are shown was not the order in which they were developed; they are presented in the order in which data flows through the system, see \textit{section 9 - System Design} for an understanding of the flow of data through the system.
\newline
\subsection{Data collection}\label{collection}
@ -1401,7 +1414,7 @@ def create_sets(self, data, lookback, sentiment):
\subsubsection{Training and Testing Model}
The neural network is set up with four layers each of which configured with 100 LSTM cells, with a regularisation (dropout) of 0.2 each, and returning sequences to each other layer. A dropout was used to ensure that the data would not be overfitted, by setting the dropout to 0.2 probability, 80\% of the data on each layer is retained for the next layer. Return sequences allows for the returning of the hidden state output for each time step and ensures the next LSTM layer has 2 inputs that carry over from the previous layer, which are the old weights and value outputs from the previous layer.
The neural network is set up with four layers, each configured with 100 LSTM cells, a dropout of 0.2 and return sequences enabled between layers. Dropout was used to ensure that the data would not be overfitted; by setting the dropout probability to 0.2, 80\% of each layer's outputs are retained for the next layer. Return sequences allows the hidden state output to be returned for each time step and ensures the next LSTM layer has the two inputs that carry over from the previous layer, which are the previous layer's weights and value outputs.
\begin{lstlisting}[language=python, caption=LSTM model creation\, layering\, compiling and fitting]
self.model = Sequential()
@ -1612,12 +1625,13 @@ def create_sets(self, data, lookback, sentiment):
\newpage
\subsubsection{Final Interface}
As briefly described throughout this section, the final interface consists of three graphs and two tables. The graphs display relevant data on predictions against the true hourly price, performance metrics, and predictions against the true price from the start of execution of the forecasting function. One table shows the next hour's predicted price alongside the current hour's price and sentiment, with the time created and a suggested market action based on the hard-coded difference threshold of 2.5\%; the other table displays the average performance metrics of the given run of the network model. The interface shown is that of the network model with sentiment embedded, but the interface for the model without sentiment embedded is exactly the same; the two are hosted on separate websites - \url{cryptosky.me} and \url{nosent.cryptosky.me}.
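As a small sketch of how such a hard-coded threshold could drive the suggested action shown in the table (the action labels below are assumptions for illustration):
\begin{lstlisting}[language=python, caption=Illustrative sketch of the 2.5\% action threshold]
# Sketch only: suggest an action when the predicted price differs from the
# current price by more than a fixed 2.5% threshold. Labels are illustrative.
THRESHOLD = 0.025

def suggest_action(current_price, predicted_price):
    change = (predicted_price - current_price) / current_price
    if change >= THRESHOLD:
        return "Consider buying"   # predicted rise beyond the threshold
    if change <= -THRESHOLD:
        return "Consider selling"  # predicted fall beyond the threshold
    return "Hold"

print(suggest_action(5000.0, 5200.0))  # a 4% rise suggests buying
\end{lstlisting}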
\begin{figure}[hbt!]
\centering
\includegraphics[width=14cm,height=19cm]{images/final_interface.png}
\begin{center}
\textit{Figure 12: Price Forecasting Dataflow diagram}
\textit{Figure 12: Final Interface - \url{cryptosky.me} and \url{nosent.cryptosky.me}}
\end{center}
\end{figure}
@ -1719,9 +1733,27 @@ def create_sets(self, data, lookback, sentiment):
15 records of predictions - \textbf{without} sentiment embedded
\end{center}
\begin{figure}[hbt!]
\centering
\includegraphics[width=16cm,height=6cm]{images/with_sentiment.png}
\begin{center}
\textit{Figure 13: \url{cryptosky.me} - model \textbf{with} sentiment embedded}
\end{center}
\end{figure}
\begin{figure}[hbt!]
\centering
\includegraphics[width=16cm,height=6cm]{images/without_sentiment.png}
\begin{center}
\textit{Figure 14: \url{nosent.cryptosky.me} - model \textbf{without} sentiment embedded}
\end{center}
\end{figure}
\newpage
On visual inspection, after both models had made 48 predictions (\textit{see above tables}, showing 10 records as an example), it can be seen that the model with sentiment embedded both follows the current price more closely and is not as conservative in its predictions as the model without sentiment.
How conservative the model without sentiment embedded is can seen in the five values between, 1 am to 5 am, where it attempts to correct itself to the actual value but slow to do so, then predicts a higher price for the next hour. At a point, the predicted value somewhat resembles that of the actual price but only due to the exact price rising substantially.
This model through the data shown, shows that it takes much longer to change the prediction to the actual real value of the next hour compared to the model with the embedded sentiment.
How conservative the model without sentiment embedded is can be seen in the five values between 1 am and 5 am, where it attempts to correct itself towards the actual value but is slow to do so, then predicts a higher price for the next hour. At one point the predicted value somewhat resembles the actual price, but only because the actual price rises substantially.
Through the data shown, this model takes much longer to move its prediction towards the actual value of the next hour compared to the model with sentiment embedded.
Also, neither model handles spikes in prices very well, which is more noticeable with the model with sentiment, as it follows the actual price more closely.
Another factor that can be identified from the results shown above is that the model with sentiment embedded alongside the price data does not react adequately when the sentiment spikes, regardless of a price spike. This could suggest that predictions have not been made using enough data of both price and sentiment, due to only predicting on the last five live prices and sentiment values rather than the 1000 samples matching the batch size the model was trained on during model creation. An improvement could be made during prediction of the next hour's price, where the model continuously predicts on the data available until it has 1000 records, then predicts using only the last 1000 records of live data. At that point, 1000 hours into predictions, the predictions might become more accurate than those presented for evaluation at the time of writing, due to matching the training batch sample size of the trained network model.
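A sketch of the windowing improvement described above; the function and variable names are illustrative:
\begin{lstlisting}[language=python, caption=Illustrative sketch of the proposed 1000-record prediction window]
# Sketch only: predict on whatever live data exists until 1000 records have
# accumulated, then always use the most recent 1000 records.
BATCH_WINDOW = 1000

def prediction_window(live_records):
    if len(live_records) < BATCH_WINDOW:
        return live_records              # early on: use everything available
    return live_records[-BATCH_WINDOW:]  # afterwards: last 1000 records only
\end{lstlisting}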
@ -1746,25 +1778,64 @@ def create_sets(self, data, lookback, sentiment):
\newpage
\section{Project Evaluation}
Reflection
Quality
\section{Discussion: Contribution and Reflection}
The testing performed and the results obtained show the significant difference that using sentiment alongside price can have on the forecasting of the next hour's price of Bitcoin, visually shown in \textit{figures 13 \& 14}. The results have also identified that neither model handles spikes in sentiment or price adequately; as seen in \textit{figures 13 \& 14}, it usually takes another hour for the spike to be reflected in the predictions, and longer still for the model without sentiment embedded. As stated in the discussion of the results, it can also be seen that the sentiment of the hour does not directly or immediately affect the next hour's actual price, even when there is a spike or drop in sentiment or price. This could suggest that sentiment affects the price over a broader time range, such as a few hours, and also shows that, in the data presented, there is no direct correlation between sentiment spikes and price spikes.
With testing and results discussed, it is important to cover some of the general issues that occurred during development, as these contribute to the results shown and to identifying correlations that address the problem statement.
One issue is that of deployment and the need to have the system continuously running, both to make hourly predictions and to satisfy the problem statement of providing a system that aids investor decision making in the market. Additionally, the system needs to run continuously to reach the training batch size, which will help determine whether predicting on 1000 records makes predictions more accurate on each iteration after 1000 records have been obtained, as described in the discussion of the results section.
The current solution used during development is to run the system on a work laptop while the front-end is hosted on an external cloud server. Each hour a prediction is made, a bash script watches the relevant files containing predictions, performance metrics and actual-versus-predicted values for graph plotting, and any changed files are then copied via SCP (Secure Copy Protocol) to the relevant path on the cloud server for the front-end to access and display to the stakeholders.
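A rough Python equivalent of that workaround is sketched below; the file names, remote path, host and polling interval are placeholder assumptions, and the project itself used a bash script:
\begin{lstlisting}[language=python, caption=Illustrative sketch of watching output files and copying them via scp]
# Sketch only: poll a set of output files and copy any that changed to the
# cloud server over scp. Paths and host below are placeholder assumptions.
import os
import subprocess
import time

WATCHED = ["predictions.csv", "metrics.csv", "plot_data.csv"]
REMOTE = "user@cryptosky.me:/var/www/html/data/"
last_seen = {}

while True:
    for path in WATCHED:
        modified = os.path.getmtime(path)
        if last_seen.get(path) != modified:
            subprocess.run(["scp", path, REMOTE], check=True)  # push updated file
            last_seen[path] = modified
    time.sleep(60)  # check once a minute
\end{lstlisting}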
Due to the cost and computational power needed, and the system not working when initially deployed to the server, it was impractical to host it on an external server within the time frame needed for completion of this project.
A proper solution would see both systems deployed at a production level with both fully operational on the external server.
Another issue, related to the one just described, arises when system execution is halted. During the period in which the system is not running, live prices and sentiment are not collected, quite apart from predictions not being made. When the system is brought back online, it is missing the now 'historical' data for the period in which it was not running and collecting data; this data therefore has to be entered manually. Because the system was not deployed in 'production' and was run from a personal laptop, this issue occurred frequently; it would be mitigated if the system were deployed to an external server. If the system were to crash on a production server, there are several try-catch statements throughout the system that will restart the processes if this issue occurs.
\subsection{Limitations}
The limitations of the system are quite clear, as described extensively already in relation to both models' performances. However, it is also important to identify which points in the technical specification the final developed system did not meet.
Looking back at the technical specification laid out at the start of this project, there is only one point that this system does not meet: that 'both prediction system and interface, should be deployed to a server due to the need to be constantly running'. As already discussed in the section prior to this one, this was unfeasible for several reasons and could not be made fully operational. This was due to the difference in execution speeds between the laptop used and the server intended for deployment; the timings on which the model was planned to train did not match or work on the deployment server, so data collection and predictions became increasingly late until an hour was left out entirely. Additionally, the server intended for deployment was on the UTC timezone rather than the GMT timezone of the development laptop, which further affected the timings of forecasting and system execution.
Additionally, further limitations would be identified if future improvements were implemented in the system. Although these improvements are outlined below in the \textit{future improvements} section, one suitable to discuss here as an example is tailoring the VADER lexicon with domain-specific language, such as \textit{bullish}, \textit{bearish} and \textit{shorting}, each with an identified and relevant weighted sentiment value, and examining the effect this would have on the outcome of the polarity classification of the VADER analyser and, in turn, on the next-hour forecasting of the Bitcoin price. Because the VADER sentiment analyser lexicon is open-source, these additions would be reasonably easy to implement; the real task, however, would be identifying a suitable weighted sentiment value for each word added, which in itself could be an entirely new project.
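As a sketch of how such additions could be made, assuming the standalone vaderSentiment package is used, the analyser's lexicon can be extended at runtime; the weights shown are placeholder assumptions, not researched values:
\begin{lstlisting}[language=python, caption=Illustrative sketch of extending the VADER lexicon with trading terms]
# Sketch only: extend the open-source VADER lexicon with trading terms.
# The weights below are illustrative guesses, not validated sentiment values.
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

analyser = SentimentIntensityAnalyzer()
analyser.lexicon.update({
    "bullish": 2.0,    # assumed positive weight
    "bearish": -2.0,   # assumed negative weight
    "shorting": -1.0,  # assumed mildly negative weight
})
print(analyser.polarity_scores("Traders are feeling bullish on BTC this hour"))
\end{lstlisting}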
Lastly, a limitation that can be identified, and is also discussed in the results section above, is that the performance metrics do not show a clear distinction between the two network models. This limitation could be overcome by using more suitable explanatory metrics, rather than relying on visual inspection, such as:
\begin{itemize}
\item Adjusted $R^2$ statistic - shows how well the selected independent variables of the model explain the variability of the dependent variable, and how well the terms fit a regression line \cite{RMSEMAE}.
\item Mean Bias Error (MBE) - calculated like the Mean Absolute Error (MAE), except that the absolute value is not taken (the signs of the errors are not removed), so the MAE becomes the mean bias error. The MBE is intended to measure the average model bias and can convey more useful information than the MAE, but should be interpreted with caution because positive and negative errors can cancel out. \cite{MBE}
\end{itemize}
Calculating these metrics could aid in distinguishing between the models on the basis of metrics rather than visual analysis.
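The sketch below computes both metrics from hypothetical prediction arrays; the values and the choice of $p$ (the number of predictor variables) are assumptions for illustration:
\begin{lstlisting}[language=python, caption=Illustrative sketch of adjusted R-squared and mean bias error]
# Sketch only: adjusted R-squared and mean bias error from prediction arrays.
import numpy as np
from sklearn.metrics import r2_score

actual = np.array([5000.0, 5050.0, 4990.0, 5100.0])  # hypothetical hourly prices
predicted = np.array([4980.0, 5060.0, 5010.0, 5080.0])

n, p = len(actual), 2                               # p = predictors (e.g. price and sentiment)
r2 = r2_score(actual, predicted)
adjusted_r2 = 1 - (1 - r2) * (n - 1) / (n - p - 1)  # penalises additional predictors

mbe = np.mean(predicted - actual)                   # signed errors, so over- and under-prediction can cancel
\end{lstlisting}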
How would changing epoch and batch size affect performance?
\subsection{Reflection}
Developing this project has opened my eyes to the development of exciting topics that have been used to make the solution of this project real. It has taught me the basics of natural language pre-processing and the steps involved in forming the data correctly for classification, both for a spam classification algorithm like the one implemented and for a traditional machine learning based sentiment analyser. On top of this, it has allowed me to discover other methods of sentiment analysis, such as lexicon-based approaches like VADER, how customisable to a topic domain they are when the lexicon is altered or added to, and how the weighted sentiments are formed (based on knowledge from the VADER paper \cite{VADERPaper}).
This project has taught me the basics of how neural networks, recurrent neural networks and LSTM recurrent neural networks function, how they compare to each other in performance for the particular task of time-series forecasting, and their suitability for this task, quantified as knowledge from relevant sources used within the literature review. It has also covered the known issues of each network, such as exploding gradients, and how an LSTM network overcomes these standard RNN issues, along with an in-depth understanding of the low-level functionality of an LSTM network, the optimisers available (their use, suitability and operation), and the regularisation and dropout techniques for minimising overfitting and underfitting of the network on the training datasets.
It has taught me how the classical (multinomial) naive Bayes probability model can be used to classify spam or ham (wanted) data, and how the underlying maths and algorithm work, through hand-coding the algorithm from scratch. It has shown how the Bag of Words algorithm for term-frequency identification builds upon the base probability model of the Bayes algorithm; how TF-IDF (Term Frequency-Inverse Document Frequency) builds upon this further, both identifying the frequency of words in a given text by assigning a weight and using this to identify commonly used words that are of no relevance to classification; and how the additive smoothing method aids in dealing with words that were not seen during training, due to not being present in the training data.
Development of this project has given me a further understanding of time management and priorities, in the sense of what needs to be focused on during development before other features are coded or implemented. An excellent example of where priorities changed can be seen in the original PID, \textit{Appendix A}, in which the solution to this project changed from focusing on the front-end application to focusing on the back-end system. This was due to a few factors that have already been identified in the Solution Approach section, where both stakeholders, I as the developer and the supervisor of the project, concluded that the creation of a front-end application with a basic back-end for predictions would not be a satisfactory solution, and that more focus should be invested in the price predictions of Bitcoin. Another point where time management had to be considered was when implementing the Naive Bayes classifier for spam filtering. Time management was not considered during its development, and the feature went out of scope for what was initially wanted; the initial idea was to use scikit-learn's built-in Multinomial Naive Bayes classifier for spam classification. However, tutorials were found, on top of the papers used to describe the algorithm during the literature review, which further described how to implement such an algorithm from scratch. This was undertaken, leading to arguably wasted time coding the classification model rather than spending more time on the neural network. Arguably, though, as detailed in the previous paragraph, it taught me a great deal about how the algorithm works and its limitations.
Furthermore, it has allowed me to form a better knowledge base and understanding of the Python language and of the data mining techniques used with it to manipulate and use data for a required purpose, and it has taught me relevant performance metrics for assessing a neural network and what the results of those metrics represent.
Lastly, if I were to conduct this project again, I would create a new lexicon explicitly tailored to the type of language used in the stock market, as it has a unique vocabulary, and determine how suitable sentiment weights would be assigned to the new words. I would also adjust both the batch size and the number of epochs to identify how this affects the training and the accuracy of predictions for both networks, with and without sentiment. I would also create a more refined and developed user interface at production level, which would more appropriately address the problem of the public not having a system available to aid investment decision making. The current interface, although providing relevant and useful information, which would remain the information displayed to stakeholders and users, could be significantly improved to meet a production-level solution. Above all, I would build upon what this project has become and implement the future improvements laid out in the \textit{future improvements} section below, most notably additional performance metrics such as the $R^2$ statistic and Mean Bias Error, along with K-fold cross-validation (which was not implemented due to time constraints and not knowing how to implement it for continuous data). These would further justify the performance of the two networks, provide validation of the model, and help accurately identify a correlation between sentiment and the price of Bitcoin and the accuracy of the network with sentiment embedded, rather than relying on the visual inspection used for the majority of the results discussion.
\section{Social, Legal and Ethical Issues}
During the creation of the Project Initiation Document, with the intended solution, no foreseeable Social, Legal or Ethical issues were identified. As the concept and solution changed considerably, both before proper development and during development, no other Social, Legal or Ethical issues presented themselves. There was, however, a developer agreement policy that had to be agreed to before being able to access the Twitter API, both for historical tweets and for use with the Tweepy Python package. This document outlined various limitations on the use of the API, such as rate limits and the personal security of generated API keys, but nothing significant that affected the development of the project, apart from the mentioned section on reverse engineering the Twitter API, which this project had not set out to do. As tweets posted on Twitter are public and free for anyone to access, no personal information issues arise; thus this agreement did not present a Social, Legal or Ethical issue. \textit{Twitter API - Developer Agreement and Policy} \cite{TwitterTerms}
\newpage
\section{Conclusion and Future Improvements}
\subsection{Conclusion}
What was aimed for?
What was produced?
It would be interesting to see what a full day's predictions would show, given that sentiment does not directly affect the next hour's price
\subsection{Future Improvements}
Future improvements could include: comparing recurrent neural network models; investigating the implementation and effect of regularisation techniques and of different optimisers on the network; how the use of ngrams could improve language detection; comparing the hand-coded naive Bayes model to scikit-learn's built-in classifiers; and alterations and additions to the VADER lexicon to tailor it with domain-specific language and relevant weighted sentiment values.
Shifting the predicted data by an hour and sequencing over previous data will also allow proper use of look-back windows.
Another improvement could be to predict the next hour's sentiment and create a threshold for it.
@ -1787,6 +1858,8 @@ def create_sets(self, data, lookback, sentiment):
K-fold cross-validation was attempted, but there were issues with continuous data.
How would this work, and what would it show or validate?
How would changing epoch and batch size affect performance?
\newpage
\nocite{*}

View File

@ -6,168 +6,172 @@
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {3}Glossary}{4}{section.3}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {4}Introduction}{9}{section.4}
\contentsline {section}{\numberline {4}Introduction}{10}{section.4}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {5}Problem Articulation}{11}{section.5}
\contentsline {section}{\numberline {5}Problem Articulation}{12}{section.5}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.1}Problem Statement}{11}{subsection.5.1}
\contentsline {subsection}{\numberline {5.1}Problem Statement}{12}{subsection.5.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.2}Stakeholders}{11}{subsection.5.2}
\contentsline {subsection}{\numberline {5.2}Stakeholders}{12}{subsection.5.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.3}Project Motivation}{12}{subsection.5.3}
\contentsline {subsection}{\numberline {5.3}Project Motivation}{13}{subsection.5.3}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {5.4}Technical Specification}{14}{subsection.5.4}
\contentsline {subsection}{\numberline {5.4}Technical Specification}{15}{subsection.5.4}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {6}Quality Goals}{16}{section.6}
\contentsline {section}{\numberline {6}Quality Goals}{17}{section.6}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.1}Process Description}{16}{subsection.6.1}
\contentsline {subsection}{\numberline {6.1}Process Description}{17}{subsection.6.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.2}Quality Objectives}{16}{subsection.6.2}
\contentsline {subsection}{\numberline {6.2}Quality Objectives}{17}{subsection.6.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {6.3}Tools to Ensure Quality}{17}{subsection.6.3}
\contentsline {subsection}{\numberline {6.3}Tools to Ensure Quality}{18}{subsection.6.3}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {7}Literature Review}{18}{section.7}
\contentsline {section}{\numberline {7}Literature Review}{19}{section.7}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.1}Existing Tools}{18}{subsection.7.1}
\contentsline {subsection}{\numberline {7.1}Existing Tools}{19}{subsection.7.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.2}Related research}{18}{subsection.7.2}
\contentsline {subsection}{\numberline {7.2}Related research}{19}{subsection.7.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.3}Data Collection}{19}{subsection.7.3}
\contentsline {subsection}{\numberline {7.3}Data Collection}{20}{subsection.7.3}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.3.1}Twitter and Twitter API}{19}{subsubsection.7.3.1}
\contentsline {subsubsection}{\numberline {7.3.1}Twitter and Twitter API}{20}{subsubsection.7.3.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.3.2}Tweepy Python Package}{20}{subsubsection.7.3.2}
\contentsline {subsubsection}{\numberline {7.3.2}Tweepy Python Package}{21}{subsubsection.7.3.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.4}Sentiment Analysis}{21}{subsection.7.4}
\contentsline {subsection}{\numberline {7.4}Sentiment Analysis}{22}{subsection.7.4}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.4.1}Natural Language Processing}{21}{subsubsection.7.4.1}
\contentsline {subsubsection}{\numberline {7.4.1}Natural Language Processing}{22}{subsubsection.7.4.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.4.2}Valence Aware Dictionary and sEntiment Reasoning}{22}{subsubsection.7.4.2}
\contentsline {subsubsection}{\numberline {7.4.2}Valence Aware Dictionary and sEntiment Reasoning}{23}{subsubsection.7.4.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.5}Neural Networks}{23}{subsection.7.5}
\contentsline {subsection}{\numberline {7.5}Neural Networks}{24}{subsection.7.5}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.5.1}Recurrent Neural Network (RNN)}{24}{subsubsection.7.5.1}
\contentsline {subsubsection}{\numberline {7.5.1}Recurrent Neural Network (RNN)}{25}{subsubsection.7.5.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.5.2}Long-Short Term Memory (LSTM)}{25}{subsubsection.7.5.2}
\contentsline {subsubsection}{\numberline {7.5.2}Long-Short Term Memory (LSTM)}{26}{subsubsection.7.5.2}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.5.3}Keras and TensorFlow}{26}{subsubsection.7.5.3}
\contentsline {subsubsection}{\numberline {7.5.3}Keras and TensorFlow}{27}{subsubsection.7.5.3}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.5.4}Optimisers}{27}{subsubsection.7.5.4}
\contentsline {subsubsection}{\numberline {7.5.4}Optimisers}{28}{subsubsection.7.5.4}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.5.5}Regularisation}{29}{subsubsection.7.5.5}
\contentsline {subsubsection}{\numberline {7.5.5}Regularisation}{30}{subsubsection.7.5.5}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.6}Machine Learning}{29}{subsection.7.6}
\contentsline {subsubsection}{\numberline {7.5.6}Dropout}{30}{subsubsection.7.5.6}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {7.6.1}Naive Bayes}{29}{subsubsection.7.6.1}
\contentsline {subsection}{\numberline {7.6}Machine Learning}{30}{subsection.7.6}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.7}Bag Of Words}{30}{subsection.7.7}
\contentsline {subsubsection}{\numberline {7.6.1}Naive Bayes}{30}{subsubsection.7.6.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.8}TF-IDF}{30}{subsection.7.8}
\contentsline {subsection}{\numberline {7.7}Bag Of Words}{31}{subsection.7.7}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.9}Addictive Smoothing}{31}{subsection.7.9}
\contentsline {subsection}{\numberline {7.8}TF-IDF}{32}{subsection.7.8}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {7.10}Regression Performance Metrics}{31}{subsection.7.10}
\contentsline {subsection}{\numberline {7.9}Addictive Smoothing}{32}{subsection.7.9}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {8}Solution Approach}{33}{section.8}
\contentsline {subsection}{\numberline {7.10}Regression Performance Metrics}{33}{subsection.7.10}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.1}Data gathering}{33}{subsection.8.1}
\contentsline {section}{\numberline {8}Solution Approach}{34}{section.8}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.2}Data pre-processing}{34}{subsection.8.2}
\contentsline {subsection}{\numberline {8.1}Data gathering}{34}{subsection.8.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.3}Spam Filtering}{34}{subsection.8.3}
\contentsline {subsection}{\numberline {8.2}Data pre-processing}{35}{subsection.8.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.4}Language Detection}{35}{subsection.8.4}
\contentsline {subsection}{\numberline {8.3}Spam Filtering}{35}{subsection.8.3}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.5}Sentiment Analysis}{35}{subsection.8.5}
\contentsline {subsection}{\numberline {8.4}Language Detection}{36}{subsection.8.4}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.6}Neural Network}{36}{subsection.8.6}
\contentsline {subsection}{\numberline {8.5}Sentiment Analysis}{36}{subsection.8.5}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.7}Price Forecasting}{38}{subsection.8.7}
\contentsline {subsection}{\numberline {8.6}Neural Network}{37}{subsection.8.6}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.8}Frontend Application}{38}{subsection.8.8}
\contentsline {subsection}{\numberline {8.7}Price Forecasting}{39}{subsection.8.7}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.9}With reference to Initial PID}{38}{subsection.8.9}
\contentsline {subsection}{\numberline {8.8}Frontend Application}{39}{subsection.8.8}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.10}Solution Summary}{39}{subsection.8.10}
\contentsline {subsection}{\numberline {8.9}With reference to Initial PID}{39}{subsection.8.9}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {8.11}Data flow Overview}{40}{subsection.8.11}
\contentsline {subsection}{\numberline {8.10}Solution Summary}{40}{subsection.8.10}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {9}System Design}{41}{section.9}
\contentsline {subsection}{\numberline {8.11}Initial Data flow Overview}{41}{subsection.8.11}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {9.1}Dataflow Designs}{41}{subsection.9.1}
\contentsline {section}{\numberline {9}System Design}{42}{section.9}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {9.2}Interface Design}{48}{subsection.9.2}
\contentsline {subsection}{\numberline {9.1}Dataflow Designs}{42}{subsection.9.1}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {10}Implementation}{50}{section.10}
\contentsline {subsection}{\numberline {9.2}Interface Design}{49}{subsection.9.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.1}Data collection}{50}{subsection.10.1}
\contentsline {section}{\numberline {10}Implementation}{51}{section.10}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.1.1}Price Time-Series Historical Data}{50}{subsubsection.10.1.1}
\contentsline {subsection}{\numberline {10.1}Data collection}{51}{subsection.10.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.1.2}Price Time-Series Live Data}{51}{subsubsection.10.1.2}
\contentsline {subsubsection}{\numberline {10.1.1}Price Time-Series Historical Data}{51}{subsubsection.10.1.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.1.3}Historical Tweet Collection}{52}{subsubsection.10.1.3}
\contentsline {subsubsection}{\numberline {10.1.2}Price Time-Series Live Data}{52}{subsubsection.10.1.2}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.1.4}Live Tweet Collection}{54}{subsubsection.10.1.4}
\contentsline {subsubsection}{\numberline {10.1.3}Historical Tweet Collection}{53}{subsubsection.10.1.3}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.2}Data pre-processing}{56}{subsection.10.2}
\contentsline {subsubsection}{\numberline {10.1.4}Live Tweet Collection}{55}{subsubsection.10.1.4}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.2.1}Tweet Filtering}{56}{subsubsection.10.2.1}
\contentsline {subsection}{\numberline {10.2}Data pre-processing}{57}{subsection.10.2}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.2.2}Language detection filtering}{57}{subsubsection.10.2.2}
\contentsline {subsubsection}{\numberline {10.2.1}Tweet Filtering}{57}{subsubsection.10.2.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.2.3}Spam filter - Tokenisation, Ngrams, Stopword removal and Stemming}{59}{subsubsection.10.2.3}
\contentsline {subsubsection}{\numberline {10.2.2}Language detection filtering}{58}{subsubsection.10.2.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.3}Spam Filtering}{60}{subsection.10.3}
\contentsline {subsubsection}{\numberline {10.2.3}Spam filter - Tokenisation, Ngrams, Stopword removal and Stemming}{60}{subsubsection.10.2.3}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.3.1}Naive Bayes model}{63}{subsubsection.10.3.1}
\contentsline {subsection}{\numberline {10.3}Spam Filtering}{61}{subsection.10.3}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.3.2}Classification}{64}{subsubsection.10.3.2}
\contentsline {subsubsection}{\numberline {10.3.1}Naive Bayes model}{64}{subsubsection.10.3.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.3.3}Predict}{65}{subsubsection.10.3.3}
\contentsline {subsubsection}{\numberline {10.3.2}Classification}{65}{subsubsection.10.3.2}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.3.4}Metrics}{65}{subsubsection.10.3.4}
\contentsline {subsubsection}{\numberline {10.3.3}Predict}{66}{subsubsection.10.3.3}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.4}Sentiment Analysis}{66}{subsection.10.4}
\contentsline {subsubsection}{\numberline {10.3.4}Metrics}{66}{subsubsection.10.3.4}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.5}Recurrent Neural Network - LSTM}{67}{subsection.10.5}
\contentsline {subsection}{\numberline {10.4}Sentiment Analysis}{67}{subsection.10.4}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.5.1}Dataset Creation}{67}{subsubsection.10.5.1}
\contentsline {subsection}{\numberline {10.5}Recurrent Neural Network - LSTM}{68}{subsection.10.5}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.5.2}Training and Testing Model}{69}{subsubsection.10.5.2}
\contentsline {subsubsection}{\numberline {10.5.1}Dataset Creation}{68}{subsubsection.10.5.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.6}Future Prediction Forecasting}{71}{subsection.10.6}
\contentsline {subsubsection}{\numberline {10.5.2}Training and Testing Model}{70}{subsubsection.10.5.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {10.7}User Interface}{73}{subsection.10.7}
\contentsline {subsection}{\numberline {10.6}Future Prediction Forecasting}{72}{subsection.10.6}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.7.1}Key Functions}{73}{subsubsection.10.7.1}
\contentsline {subsection}{\numberline {10.7}User Interface}{74}{subsection.10.7}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {10.7.2}Final Interface}{76}{subsubsection.10.7.2}
\contentsline {subsubsection}{\numberline {10.7.1}Key Functions}{74}{subsubsection.10.7.1}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {11}Testing Metrics and Accuracy}{77}{section.11}
\contentsline {subsubsection}{\numberline {10.7.2}Final Interface}{77}{subsubsection.10.7.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {11.1}Integration Testing}{77}{subsection.11.1}
\contentsline {section}{\numberline {11}Testing Metrics and Accuracy}{79}{section.11}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {11.2}Accuracy of Model \& Results}{78}{subsection.11.2}
\contentsline {subsection}{\numberline {11.1}Integration Testing}{79}{subsection.11.1}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {11.2.1}Results Discussion}{78}{subsubsection.11.2.1}
\contentsline {subsection}{\numberline {11.2}Accuracy of Model \& Results}{80}{subsection.11.2}
\defcounter {refsection}{0}\relax
\contentsline {subsubsection}{\numberline {11.2.2}Execution Speeds}{80}{subsubsection.11.2.2}
\contentsline {subsubsection}{\numberline {11.2.1}Results Discussion}{80}{subsubsection.11.2.1}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {12}Project Evaluation}{81}{section.12}
\contentsline {subsubsection}{\numberline {11.2.2}Execution Speeds}{83}{subsubsection.11.2.2}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {13}Discussion: Contribution and Reflection}{81}{section.13}
\contentsline {section}{\numberline {12}Discussion: Contribution and Reflection}{84}{section.12}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {13.1}Limitations}{81}{subsection.13.1}
\contentsline {subsection}{\numberline {12.1}Limitations}{85}{subsection.12.1}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {14}Conclusion and Future Improvements}{82}{section.14}
\contentsline {subsection}{\numberline {12.2}Reflection}{86}{subsection.12.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {14.1}Conclusion}{82}{subsection.14.1}
\contentsline {section}{\numberline {13}Social, Legal and Ethical Issues}{88}{section.13}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {14.2}Future Improvements}{82}{subsection.14.2}
\contentsline {section}{\numberline {14}Conclusion and Future Improvements}{89}{section.14}
\defcounter {refsection}{0}\relax
\contentsline {section}{\numberline {15}Appendices}{88}{section.15}
\contentsline {subsection}{\numberline {14.1}Conclusion}{89}{subsection.14.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {15.1}Appendix A - Project Initiation Document}{88}{subsection.15.1}
\contentsline {subsection}{\numberline {14.2}Future Improvements}{89}{subsection.14.2}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {15.2}Appendix B - Log book}{101}{subsection.15.2}
\contentsline {section}{\numberline {15}Appendices}{95}{section.15}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {15.1}Appendix A - Project Initiation Document}{95}{subsection.15.1}
\defcounter {refsection}{0}\relax
\contentsline {subsection}{\numberline {15.2}Appendix B - Log book}{108}{subsection.15.2}

5126
document.txt Normal file

File diff suppressed because it is too large Load Diff

BIN
images/with_sentiment.png Normal file

Binary file not shown.


Binary file not shown.


View File

@ -420,6 +420,26 @@
url={https://missinglink.ai/guides/neural-network-concepts/neural-network-bias-bias-neuron-overfitting-underfitting/}
}
@inproceedings{dropoutM,
title={Dropout in (Deep) Machine learning},
author={Amar Budhiraja},
booktitle={},
pages={},
year={Dec 15, 2016},
organization={Medium},
url={https://medium.com/@amarbudhiraja/https-medium-com-amarbudhiraja-learning-less-to-learn-better-dropout-in-deep-machine-learning-74334da4bfc5}
}
@inproceedings{dropoutKeras,
title={Dropout},
author={Keras Team},
booktitle={},
pages={},
year={},
organization={Keras},
url={https://keras.io/layers/core/#dropout}
}
@inproceedings{NValgor,
title={Naive Bayes},
author={scikit-learn developers},
@ -469,3 +489,23 @@
organization={Medium},
url={https://medium.freecodecamp.org/machine-learning-mean-squared-error-regression-line-c7dde9a26b93}
}
@inproceedings{MBE,
title={MAE and RMSE Which Metric is Better},
author={JJ},
booktitle={},
pages={},
year={Mar 23, 2016},
organization={Medium},
url={https://medium.com/human-in-a-machine-world/mae-and-rmse-which-metric-is-better-e60ac3bde13d}
}
@inproceedings{TwitterTerms,
title={Developer Agreement and Policy},
author={Twitter},
booktitle={},
pages={},
year={Effective: May 25, 2018},
organization={Twitter Corp.},
url={https://developer.twitter.com/en/developer-terms/agreement-and-policy.html}
}