diff --git a/document.aux b/document.aux index df26285..151da2f 100644 --- a/document.aux +++ b/document.aux @@ -41,63 +41,150 @@ \newlabel{stakeholders}{{}{8}{Stakeholders}{section*.8}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Project Constraints}{8}{section*.9}} \newlabel{constraints}{{}{8}{Project Constraints}{section*.9}{}} +\abx@aux@cite{2} +\abx@aux@segm{0}{0}{2} +\abx@aux@segm{0}{0}{1} +\abx@aux@cite{3} +\abx@aux@segm{0}{0}{3} +\abx@aux@cite{4} +\abx@aux@segm{0}{0}{4} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Literature Review}{9}{section*.10}} \newlabel{literature}{{}{9}{Literature Review}{section*.10}{}} \@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Existing Tools}{9}{section*.11}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Related Work}{9}{section*.12}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Tweet Collection}{9}{section*.13}} -\newlabel{tweet_collection}{{}{9}{Tweet Collection}{section*.13}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Sentiment Analysis}{9}{section*.14}} -\newlabel{sentiment}{{}{9}{Sentiment Analysis}{section*.14}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Algorithms}{9}{section*.15}} -\newlabel{algorithms}{{}{9}{Algorithms}{section*.15}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Techniques}{9}{section*.16}} -\newlabel{techniques}{{}{9}{Techniques}{section*.16}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Neural Networks}{9}{section*.17}} -\newlabel{networks}{{}{9}{Neural Networks}{section*.17}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax 
}\@writefile{toc}{\contentsline {subsubsection}{Types}{9}{section*.18}} -\newlabel{types}{{}{9}{Types}{section*.18}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{LSTMs}{9}{section*.19}} -\newlabel{lstms}{{}{9}{LSTMs}{section*.19}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Machine Learning}{9}{section*.20}} -\newlabel{machine}{{}{9}{Machine Learning}{section*.20}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Logistical Regression}{9}{section*.21}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Solution Approach}{10}{section*.22}} -\newlabel{solution}{{}{10}{Solution Approach}{section*.22}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Solution Summary}{10}{section*.23}} -\newlabel{sumary}{{}{10}{Solution Summary}{section*.23}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data flow Overview}{10}{section*.24}} -\newlabel{data-flow}{{}{10}{Data flow Overview}{section*.24}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Packages, Tools and Techniques}{10}{section*.25}} -\newlabel{tools}{{}{10}{Packages, Tools and Techniques}{section*.25}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{System Design and Implementation}{11}{section*.26}} -\newlabel{implementation}{{}{11}{System Design and Implementation}{section*.26}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data collection}{11}{section*.27}} -\newlabel{collection}{{}{11}{Data collection}{section*.27}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data processing}{11}{section*.28}} 
-\newlabel{processing}{{}{11}{Data processing}{section*.28}{}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Preprocessing}{11}{section*.29}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Tweet Filtering}{11}{section*.30}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Text Cleaning}{11}{section*.31}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Ngram based Language detection filtering}{11}{section*.32}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Spam Filtering}{11}{section*.33}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Tweet Processing}{11}{section*.34}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Naive Bayes model}{11}{section*.35}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Multinomial Naive Bayes}{11}{section*.36}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Bernoullis Naive Bayes}{11}{section*.37}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Gaussuan Naive Bayes}{11}{section*.38}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Sentiment Analysis}{11}{section*.39}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{VADER}{11}{section*.40}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Testing: Verification and Reflection}{12}{section*.41}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Discussion: Contribution and Reflection}{13}{section*.42}} 
-\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Limitations}{13}{section*.43}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Social, Legal and Ethical Issues}{14}{section*.44}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Conclusion and Future Improvements}{15}{section*.45}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Conclusion}{15}{section*.46}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Future Improvements}{15}{section*.47}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{References}{16}{section*.48}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Appendices}{17}{section*.50}} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Appendix A - Project Initiation Document}{17}{section*.51}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Related research}{9}{section*.12}} +\abx@aux@cite{5} +\abx@aux@segm{0}{0}{5} +\abx@aux@cite{6} +\abx@aux@segm{0}{0}{6} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data Collection}{10}{section*.13}} +\newlabel{tweet_collection}{{}{10}{Data Collection}{section*.13}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Twitter and Twitter API}{10}{section*.14}} +\abx@aux@cite{7} +\abx@aux@segm{0}{0}{7} +\abx@aux@segm{0}{0}{6} +\abx@aux@segm{0}{0}{1} +\abx@aux@segm{0}{0}{2} +\abx@aux@segm{0}{0}{4} +\abx@aux@cite{8} +\abx@aux@segm{0}{0}{8} +\abx@aux@cite{9} +\abx@aux@segm{0}{0}{9} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Tweepy Python 
Package}{11}{section*.15}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Sentiment Analysis}{11}{section*.16}} +\newlabel{sentiment}{{}{11}{Sentiment Analysis}{section*.16}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Natural Language Processing}{11}{section*.17}} +\newlabel{algorithms}{{}{11}{Natural Language Processing}{section*.17}{}} +\abx@aux@cite{10} +\abx@aux@segm{0}{0}{10} +\abx@aux@segm{0}{0}{10} +\abx@aux@segm{0}{0}{10} +\abx@aux@cite{11} +\abx@aux@segm{0}{0}{11} +\abx@aux@segm{0}{0}{10} +\abx@aux@segm{0}{0}{10} +\abx@aux@segm{0}{0}{10} +\abx@aux@cite{12} +\abx@aux@segm{0}{0}{12} +\abx@aux@cite{13} +\abx@aux@segm{0}{0}{13} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Valence Aware Dictionary and sEntiment Reasoning}{12}{section*.18}} +\newlabel{Vader}{{}{12}{Valence Aware Dictionary and sEntiment Reasoning}{section*.18}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Neural Networks}{12}{section*.19}} +\newlabel{networks}{{}{12}{Neural Networks}{section*.19}{}} +\abx@aux@cite{14} +\abx@aux@segm{0}{0}{14} +\abx@aux@segm{0}{0}{13} +\abx@aux@cite{15} +\abx@aux@segm{0}{0}{15} +\abx@aux@cite{16} +\abx@aux@segm{0}{0}{16} +\abx@aux@cite{17} +\abx@aux@segm{0}{0}{17} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Recurrent Neural Network (RNN)}{13}{section*.20}} +\newlabel{types}{{}{13}{Recurrent Neural Network (RNN)}{section*.20}{}} +\abx@aux@cite{18} +\abx@aux@segm{0}{0}{18} +\abx@aux@cite{19} +\abx@aux@segm{0}{0}{19} +\abx@aux@cite{20} +\abx@aux@segm{0}{0}{20} +\abx@aux@segm{0}{0}{20} +\abx@aux@cite{21} +\abx@aux@segm{0}{0}{21} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Long-Short Term Memory (LSTM)}{14}{section*.21}} 
+\newlabel{lstms}{{}{14}{Long-Short Term Memory (LSTM)}{section*.21}{}} +\abx@aux@segm{0}{0}{21} +\abx@aux@segm{0}{0}{20} +\abx@aux@segm{0}{0}{21} +\abx@aux@cite{22} +\abx@aux@segm{0}{0}{22} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Kera and TensorFlow}{15}{section*.22}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Optimisers}{15}{section*.23}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Loss}{16}{section*.24}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Regularisation - Dropouts}{16}{section*.25}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Machine Learning}{16}{section*.26}} +\newlabel{machine}{{}{16}{Machine Learning}{section*.26}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Naive Bayes}{16}{section*.27}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Other Techniques}{16}{section*.28}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Ngrams}{16}{section*.29}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Scoring and Validation}{16}{section*.30}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Solution Approach}{17}{section*.31}} +\newlabel{solution}{{}{17}{Solution Approach}{section*.31}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Solution Summary}{17}{section*.32}} +\newlabel{sumary}{{}{17}{Solution Summary}{section*.32}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data flow Overview}{17}{section*.33}} 
+\newlabel{data-flow}{{}{17}{Data flow Overview}{section*.33}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Packages, Tools and Techniques}{17}{section*.34}} +\newlabel{tools}{{}{17}{Packages, Tools and Techniques}{section*.34}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{System Design and Implementation}{18}{section*.35}} +\newlabel{implementation}{{}{18}{System Design and Implementation}{section*.35}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data collection}{18}{section*.36}} +\newlabel{collection}{{}{18}{Data collection}{section*.36}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Price Time-series Data}{18}{section*.37}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Data processing}{18}{section*.38}} +\newlabel{processing}{{}{18}{Data processing}{section*.38}{}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Preprocessing}{18}{section*.39}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Tweet Filtering}{18}{section*.40}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Text Cleaning}{18}{section*.41}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Ngram based Language detection filtering}{18}{section*.42}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Spam Filtering}{18}{section*.43}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Tweet Processing}{18}{section*.44}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {paragraph}{Naive Bayes model}{18}{section*.45}} 
+\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Sentiment Analysis}{18}{section*.46}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{VADER}{18}{section*.47}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Recurrent Neural Network}{18}{section*.48}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Training and Testing Model}{18}{section*.49}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Validation}{18}{section*.50}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsubsection}{Future Prediction Forecasting}{18}{section*.51}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Testing: Verification and Reflection}{19}{section*.52}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Discussion: Contribution and Reflection}{20}{section*.53}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Limitations}{20}{section*.54}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Conclusion and Future Improvements}{21}{section*.55}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Conclusion}{21}{section*.56}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Future Improvements}{21}{section*.57}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {section}{Appendices}{24}{section*.59}} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Appendix A - Project Initiation Document}{24}{section*.60}} \abx@aux@refcontextdefaultsdone 
\abx@aux@defaultrefcontext{0}{1}{none/global//global/global} -\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Appendix B - Log book}{30}{section*.52}} +\abx@aux@defaultrefcontext{0}{2}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{3}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{4}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{5}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{6}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{7}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{8}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{9}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{10}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{11}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{12}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{13}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{14}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{15}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{16}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{17}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{18}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{19}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{20}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{21}{none/global//global/global} +\abx@aux@defaultrefcontext{0}{22}{none/global//global/global} +\@writefile{toc}{\defcounter {refsection}{0}\relax }\@writefile{toc}{\contentsline {subsection}{Appendix B - Log book}{37}{section*.61}} diff --git a/document.bbl b/document.bbl index 00e66b7..b378422 100644 --- a/document.bbl +++ b/document.bbl @@ -67,6 +67,665 @@ \verb https://arxiv.org/pdf/1610.09225.pdf \endverb \endentry + \entry{2}{inproceedings}{} + \name{author}{1}{}{% + {{hash=f2e3d1233a4ae1ff0dc165a2561130e0}{% + family={Evita\bibnamedelima Stenqvist}, + 
familyi={E\bibinitperiod\bibinitdelim S\bibinitperiod}, + given={Jacob\bibnamedelima Lonno}, + giveni={J\bibinitperiod\bibinitdelim L\bibinitperiod}}}% + } + \list{organization}{1}{% + {Diva}% + } + \strng{namehash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \strng{fullhash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \strng{bibnamehash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \strng{authorbibnamehash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \strng{authornamehash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \strng{authorfullhash}{f2e3d1233a4ae1ff0dc165a2561130e0} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Predicting Bitcoin price fluctuation with Twitter sentiment analysis} + \field{year}{2017} + \verb{urlraw} + \verb http://www.diva-portal.org/smash/get/diva2:1110776/FULLTEXT01.pdf + \endverb + \verb{url} + \verb http://www.diva-portal.org/smash/get/diva2:1110776/FULLTEXT01.pdf + \endverb + \endentry + \entry{3}{inproceedings}{} + \name{author}{1}{}{% + {{hash=0082b92db99b10a1b0be2e2a81ee914f}{% + family={Intel-Corporation}, + familyi={I\bibinithyphendelim C\bibinitperiod}}}% + } + \list{organization}{1}{% + {Code Project}% + } + \strng{namehash}{0082b92db99b10a1b0be2e2a81ee914f} + \strng{fullhash}{0082b92db99b10a1b0be2e2a81ee914f} + \strng{bibnamehash}{0082b92db99b10a1b0be2e2a81ee914f} + \strng{authorbibnamehash}{0082b92db99b10a1b0be2e2a81ee914f} + \strng{authornamehash}{0082b92db99b10a1b0be2e2a81ee914f} + \strng{authorfullhash}{0082b92db99b10a1b0be2e2a81ee914f} + \field{sortinit}{4} + \field{sortinithash}{11cdaee3b18e01d77f3f428b13c1fc76} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Stock Predictions through News Sentiment Analysis} + \field{year}{2017} + \verb{urlraw} + \verb https://www.codeproject.com/Articles/1201444/Stock-Predictions-through-News-Sentiment-Analysis + \endverb + \verb{url} + \verb 
https://www.codeproject.com/Articles/1201444/Stock-Predictions-through-News-Sentiment-Analysis + \endverb + \endentry + \entry{4}{inproceedings}{} + \name{author}{1}{}{% + {{hash=be03763696a3a6a39ccce2baf4fb6f4e}{% + family={Sean\bibnamedelima McNally}, + familyi={S\bibinitperiod\bibinitdelim M\bibinitperiod}, + given={Simon\bibnamedelima Caton}, + giveni={S\bibinitperiod\bibinitdelim C\bibinitperiod}, + suffix={Jason\bibnamedelima Roche}, + suffixi={J\bibinitperiod\bibinitdelim R\bibinitperiod}}}% + } + \list{organization}{1}{% + {IEEE}% + } + \strng{namehash}{be03763696a3a6a39ccce2baf4fb6f4e} + \strng{fullhash}{be03763696a3a6a39ccce2baf4fb6f4e} + \strng{bibnamehash}{be03763696a3a6a39ccce2baf4fb6f4e} + \strng{authorbibnamehash}{be03763696a3a6a39ccce2baf4fb6f4e} + \strng{authornamehash}{be03763696a3a6a39ccce2baf4fb6f4e} + \strng{authorfullhash}{be03763696a3a6a39ccce2baf4fb6f4e} + \field{sortinit}{5} + \field{sortinithash}{3c19c3776b658b3558e9e2e4840c01e2} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{2018 26th Euromicro International Conference on Parallel, Distributed and Network-based Processing (PDP)} + \field{title}{Predicting the Price of Bitcoin Using Machine Learning} + \field{year}{2018} + \field{pages}{344\bibrangedash 347} + \range{pages}{4} + \verb{urlraw} + \verb https://ieeexplore.ieee.org/abstract/document/8374483 + \endverb + \verb{url} + \verb https://ieeexplore.ieee.org/abstract/document/8374483 + \endverb + \endentry + \entry{5}{inproceedings}{} + \name{author}{1}{}{% + {{hash=2491bc9c7d8731e1ae33124093bc7026}{% + family={Twitter}, + familyi={T\bibinitperiod}}}% + } + \list{organization}{1}{% + {Twitter Developers}% + } + \strng{namehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{fullhash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{bibnamehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{authorbibnamehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{authornamehash}{2491bc9c7d8731e1ae33124093bc7026} + 
\strng{authorfullhash}{2491bc9c7d8731e1ae33124093bc7026} + \field{sortinit}{6} + \field{sortinithash}{57e57fb8451e7fcfa45d1e069f6d3136} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Search Tweets} + \field{year}{2018} + \verb{urlraw} + \verb https://developer.twitter.com/en/docs/tweets/search/overview + \endverb + \verb{url} + \verb https://developer.twitter.com/en/docs/tweets/search/overview + \endverb + \endentry + \entry{6}{inproceedings}{} + \name{author}{1}{}{% + {{hash=2491bc9c7d8731e1ae33124093bc7026}{% + family={Twitter}, + familyi={T\bibinitperiod}}}% + } + \list{organization}{1}{% + {Twitter Developers}% + } + \strng{namehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{fullhash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{bibnamehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{authorbibnamehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{authornamehash}{2491bc9c7d8731e1ae33124093bc7026} + \strng{authorfullhash}{2491bc9c7d8731e1ae33124093bc7026} + \field{sortinit}{7} + \field{sortinithash}{c818dd9105a2852444fc9f5e145c294e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Consuming streaming data} + \field{year}{2018} + \verb{urlraw} + \verb https://developer.twitter.com/en/docs/tutorials/consuming-streaming-data.html + \endverb + \verb{url} + \verb https://developer.twitter.com/en/docs/tutorials/consuming-streaming-data.html + \endverb + \endentry + \entry{7}{inproceedings}{} + \name{author}{1}{}{% + {{hash=1e48a059b3f2c3703bea8d54a9e002c9}{% + family={Roesslein}, + familyi={R\bibinitperiod}, + given={Joshua}, + giveni={J\bibinitperiod}}}% + } + \list{organization}{1}{% + {Tweepy}% + } + \strng{namehash}{1e48a059b3f2c3703bea8d54a9e002c9} + \strng{fullhash}{1e48a059b3f2c3703bea8d54a9e002c9} + \strng{bibnamehash}{1e48a059b3f2c3703bea8d54a9e002c9} + \strng{authorbibnamehash}{1e48a059b3f2c3703bea8d54a9e002c9} + \strng{authornamehash}{1e48a059b3f2c3703bea8d54a9e002c9} + 
\strng{authorfullhash}{1e48a059b3f2c3703bea8d54a9e002c9} + \field{sortinit}{8} + \field{sortinithash}{07edf88d4ea82509b9c4b4d13f41c452} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Streaming With Tweepy} + \field{year}{2009} + \verb{urlraw} + \verb http://docs.tweepy.org/en/v3.4.0/streaming_how_to.html + \endverb + \verb{url} + \verb http://docs.tweepy.org/en/v3.4.0/streaming_how_to.html + \endverb + \endentry + \entry{8}{inproceedings}{} + \name{author}{1}{}{% + {{hash=ae1fc492a61542a18835322c490c5af1}{% + family={Mehrnoush\bibnamedelima Shamsfard}, + familyi={M\bibinitperiod\bibinitdelim S\bibinitperiod}, + given={Samira\bibnamedelima Noferesti}, + giveni={S\bibinitperiod\bibinitdelim N\bibinitperiod}}}% + } + \list{organization}{1}{% + {Elsevier}% + } + \strng{namehash}{ae1fc492a61542a18835322c490c5af1} + \strng{fullhash}{ae1fc492a61542a18835322c490c5af1} + \strng{bibnamehash}{ae1fc492a61542a18835322c490c5af1} + \strng{authorbibnamehash}{ae1fc492a61542a18835322c490c5af1} + \strng{authornamehash}{ae1fc492a61542a18835322c490c5af1} + \strng{authorfullhash}{ae1fc492a61542a18835322c490c5af1} + \field{sortinit}{1} + \field{sortinithash}{2174f786c6195e7fe2ee1c229b416e29} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{Journal of Biomedical Informatics} + \field{title}{Using Linked Data for polarity classification of patients experiences} + \field{year}{2015} + \field{pages}{6\bibrangedash 19} + \range{pages}{14} + \verb{urlraw} + \verb https://www.sciencedirect.com/science/article/pii/S1532046415001276 + \endverb + \verb{url} + \verb https://www.sciencedirect.com/science/article/pii/S1532046415001276 + \endverb + \endentry + \entry{9}{inproceedings}{} + \name{author}{1}{}{% + {{hash=75d8d9e701bff6c60b3e05aa6c0c3f9f}{% + family={Chedia\bibnamedelima Dhaoui}, + familyi={C\bibinitperiod\bibinitdelim D\bibinitperiod}, + given={Lay\bibnamedelimb Peng\bibnamedelima Tan}, + 
giveni={L\bibinitperiod\bibinitdelim P\bibinitperiod\bibinitdelim T\bibinitperiod}, + suffix={Cynthia\bibnamedelimb M.\bibnamedelimi Webster}, + suffixi={C\bibinitperiod\bibinitdelim M\bibinitperiod\bibinitdelim W\bibinitperiod}}}% + } + \list{organization}{1}{% + {Emerald Insight}% + } + \strng{namehash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \strng{fullhash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \strng{bibnamehash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \strng{authorbibnamehash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \strng{authornamehash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \strng{authorfullhash}{75d8d9e701bff6c60b3e05aa6c0c3f9f} + \field{sortinit}{1} + \field{sortinithash}{2174f786c6195e7fe2ee1c229b416e29} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{Journal of Consumer Marketing, Volume 34. Issue 6} + \field{title}{Social media sentiment analysis: lexicon versus machine learning} + \field{year}{2017} + \verb{urlraw} + \verb https://www.emeraldinsight.com/doi/pdfplus/10.1108/JCM-03-2017-2141 + \endverb + \verb{url} + \verb https://www.emeraldinsight.com/doi/pdfplus/10.1108/JCM-03-2017-2141 + \endverb + \endentry + \entry{10}{inproceedings}{} + \name{author}{2}{}{% + {{hash=45764413ab79a1c3696604be19148c89}{% + family={Hutto}, + familyi={H\bibinitperiod}, + given={C.J.}, + giveni={C\bibinitperiod}}}% + {{hash=c3163690c9103d2541d6910886053ba6}{% + family={Gilbert}, + familyi={G\bibinitperiod}, + given={Eric}, + giveni={E\bibinitperiod}}}% + } + \list{organization}{1}{% + {Ann Arbor, MI}% + } + \strng{namehash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \strng{fullhash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \strng{bibnamehash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \strng{authorbibnamehash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \strng{authornamehash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \strng{authorfullhash}{13e8125b59ac4c2baf7ccab9e513e9a5} + \field{sortinit}{1} + \field{sortinithash}{2174f786c6195e7fe2ee1c229b416e29} + 
\field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{Eighth International Conference on Weblogs and Social Media (ICWSM-14)} + \field{title}{VADER: A Parsimonious Rule-based Model for Sentiment Analysis of Social Media Text} + \field{year}{2014} + \verb{urlraw} + \verb https://www.aaai.org/ocs/index.php/ICWSM/ICWSM14/paper/download/8109/8122 + \endverb + \verb{url} + \verb https://www.aaai.org/ocs/index.php/ICWSM/ICWSM14/paper/download/8109/8122 + \endverb + \endentry + \entry{11}{inproceedings}{} + \name{author}{1}{}{% + {{hash=47a5d7268039bf86cea626bcaefd4877}{% + family={Kenton}, + familyi={K\bibinitperiod}, + given={Will}, + giveni={W\bibinitperiod}}}% + } + \list{organization}{1}{% + {Investopedia}% + } + \strng{namehash}{47a5d7268039bf86cea626bcaefd4877} + \strng{fullhash}{47a5d7268039bf86cea626bcaefd4877} + \strng{bibnamehash}{47a5d7268039bf86cea626bcaefd4877} + \strng{authorbibnamehash}{47a5d7268039bf86cea626bcaefd4877} + \strng{authornamehash}{47a5d7268039bf86cea626bcaefd4877} + \strng{authorfullhash}{47a5d7268039bf86cea626bcaefd4877} + \field{sortinit}{1} + \field{sortinithash}{2174f786c6195e7fe2ee1c229b416e29} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Wisdom of Crowds} + \field{year}{2018} + \verb{urlraw} + \verb https://www.investopedia.com/terms/w/wisdom-crowds.asp + \endverb + \verb{url} + \verb https://www.investopedia.com/terms/w/wisdom-crowds.asp + \endverb + \endentry + \entry{12}{inproceedings}{} + \name{author}{1}{}{% + {{hash=98de2b3b288cf4bd291e8937e5e28c49}{% + family={Skymind}, + familyi={S\bibinitperiod}}}% + } + \list{organization}{1}{% + {Skymind}% + } + \strng{namehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{fullhash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{bibnamehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{authorbibnamehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{authornamehash}{98de2b3b288cf4bd291e8937e5e28c49} + 
\strng{authorfullhash}{98de2b3b288cf4bd291e8937e5e28c49} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{A.I. Wiki} + \field{title}{A Beginner's Guide to Neural Networks and Deep Learning} + \field{year}{2018} + \verb{urlraw} + \verb https://skymind.ai/wiki/neural-network + \endverb + \verb{url} + \verb https://skymind.ai/wiki/neural-network + \endverb + \endentry + \entry{13}{inproceedings}{} + \name{author}{1}{}{% + {{hash=b5590c43c6c12766886d1cdcae8c5fc3}{% + family={DeMuro}, + familyi={D\bibinitperiod}, + given={Jonas}, + giveni={J\bibinitperiod}}}% + } + \list{organization}{1}{% + {techradar}% + } + \strng{namehash}{b5590c43c6c12766886d1cdcae8c5fc3} + \strng{fullhash}{b5590c43c6c12766886d1cdcae8c5fc3} + \strng{bibnamehash}{b5590c43c6c12766886d1cdcae8c5fc3} + \strng{authorbibnamehash}{b5590c43c6c12766886d1cdcae8c5fc3} + \strng{authornamehash}{b5590c43c6c12766886d1cdcae8c5fc3} + \strng{authorfullhash}{b5590c43c6c12766886d1cdcae8c5fc3} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{World of tech} + \field{title}{What is a neural network} + \field{year}{2018} + \verb{urlraw} + \verb https://www.techradar.com/uk/news/what-is-a-neural-network + \endverb + \verb{url} + \verb https://www.techradar.com/uk/news/what-is-a-neural-network + \endverb + \endentry + \entry{14}{inproceedings}{} + \name{author}{1}{}{% + {{hash=7ec8fd610c35dade82ca0d76c0e6a7ee}{% + family={Bach}, + familyi={B\bibinitperiod}, + given={F.R.}, + giveni={F\bibinitperiod}}}% + } + \list{organization}{1}{% + {NIPS Proceedings}% + } + \strng{namehash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + \strng{fullhash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + \strng{bibnamehash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + \strng{authorbibnamehash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + 
\strng{authornamehash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + \strng{authorfullhash}{7ec8fd610c35dade82ca0d76c0e6a7ee} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{Advances in neural information processing systems} + \field{title}{Supervised dictionary learning} + \field{year}{2009} + \field{pages}{1033\bibrangedash 1040} + \range{pages}{8} + \verb{urlraw} + \verb http://papers.nips.cc/paper/3448-supervised-dictionary-learning + \endverb + \verb{url} + \verb http://papers.nips.cc/paper/3448-supervised-dictionary-learning + \endverb + \warn{\item Name "Mairal, J., Ponce, J., Sapiro, G., Zisserman, A." has too many commas: skipping name} + \endentry + \entry{15}{inproceedings}{} + \name{author}{3}{}{% + {{hash=d198ae71c61c758b90826c452447133f}{% + family={Rumelhart}, + familyi={R\bibinitperiod}, + given={David\bibnamedelima E}, + giveni={D\bibinitperiod\bibinitdelim E\bibinitperiod}}}% + {{hash=5aeb1de9b6c6f00a078230c628e6f41c}{% + family={Hinton}, + familyi={H\bibinitperiod}, + given={Geoffrey\bibnamedelima E}, + giveni={G\bibinitperiod\bibinitdelim E\bibinitperiod}}}% + {{hash=f0fac00adb48128fc1f7750b2d8e7065}{% + family={Williams}, + familyi={W\bibinitperiod}, + given={Ronald\bibnamedelima J}, + giveni={R\bibinitperiod\bibinitdelim J\bibinitperiod}}}% + } + \list{organization}{1}{% + {California Univ San Diego La Jolla Inst for Cognitive Science}% + } + \strng{namehash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \strng{fullhash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \strng{bibnamehash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \strng{authorbibnamehash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \strng{authornamehash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \strng{authorfullhash}{6c0c2652b5e4e9258c40ce50c5704c6b} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + 
\field{title}{Learning internal representations by error propagation} + \field{year}{1985} + \verb{urlraw} + \verb https://apps.dtic.mil/docs/citations/ADA164453 + \endverb + \verb{url} + \verb https://apps.dtic.mil/docs/citations/ADA164453 + \endverb + \endentry + \entry{16}{inproceedings}{} + \name{author}{1}{}{% + {{hash=98de2b3b288cf4bd291e8937e5e28c49}{% + family={Skymind}, + familyi={S\bibinitperiod}}}% + } + \list{organization}{1}{% + {Skymind}% + } + \strng{namehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{fullhash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{bibnamehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{authorbibnamehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{authornamehash}{98de2b3b288cf4bd291e8937e5e28c49} + \strng{authorfullhash}{98de2b3b288cf4bd291e8937e5e28c49} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{A.I. Wiki} + \field{title}{A Beginner's Guide to LSTMs and Recurrent Neural Networks} + \field{year}{2018} + \verb{urlraw} + \verb https://skymind.ai/wiki/lstm + \endverb + \verb{url} + \verb https://skymind.ai/wiki/lstm + \endverb + \endentry + \entry{17}{inproceedings}{} + \name{author}{1}{}{% + {{hash=ad6e7c0188bc1f913c6dad630fde9e39}{% + family={Donges}, + familyi={D\bibinitperiod}, + given={Niklas}, + giveni={N\bibinitperiod}}}% + } + \list{organization}{1}{% + {Towards Data Science}% + } + \strng{namehash}{ad6e7c0188bc1f913c6dad630fde9e39} + \strng{fullhash}{ad6e7c0188bc1f913c6dad630fde9e39} + \strng{bibnamehash}{ad6e7c0188bc1f913c6dad630fde9e39} + \strng{authorbibnamehash}{ad6e7c0188bc1f913c6dad630fde9e39} + \strng{authornamehash}{ad6e7c0188bc1f913c6dad630fde9e39} + \strng{authorfullhash}{ad6e7c0188bc1f913c6dad630fde9e39} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Recurrent Neural Networks 
and LSTM} + \field{year}{2018} + \verb{urlraw} + \verb https://towardsdatascience.com/recurrent-neural-networks-and-lstm-4b601dd822a5 + \endverb + \verb{url} + \verb https://towardsdatascience.com/recurrent-neural-networks-and-lstm-4b601dd822a5 + \endverb + \endentry + \entry{18}{inproceedings}{} + \name{author}{1}{}{% + {{hash=37ec6490760a0f7d5715b2e904f6cf20}{% + family={Jason\bibnamedelima Brownlee}, + familyi={J\bibinitperiod\bibinitdelim B\bibinitperiod}, + given={PhD.}, + giveni={P\bibinitperiod}}}% + } + \list{organization}{1}{% + {Machine Larning Mastery}% + } + \strng{namehash}{37ec6490760a0f7d5715b2e904f6cf20} + \strng{fullhash}{37ec6490760a0f7d5715b2e904f6cf20} + \strng{bibnamehash}{37ec6490760a0f7d5715b2e904f6cf20} + \strng{authorbibnamehash}{37ec6490760a0f7d5715b2e904f6cf20} + \strng{authornamehash}{37ec6490760a0f7d5715b2e904f6cf20} + \strng{authorfullhash}{37ec6490760a0f7d5715b2e904f6cf20} + \field{sortinit}{2} + \field{sortinithash}{cbff857e587bcb4635511624d773949e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{A Gentle Introduction to Exploding Gradients in Neural Networks} + \field{year}{2017} + \verb{urlraw} + \verb https://machinelearningmastery.com/exploding-gradients-in-neural-networks/ + \endverb + \verb{url} + \verb https://machinelearningmastery.com/exploding-gradients-in-neural-networks/ + \endverb + \endentry + \entry{19}{inproceedings}{} + \name{author}{1}{}{% + {{hash=2397822f564bc95b5d3e07a3cc633029}{% + family={Team}, + familyi={T\bibinitperiod}, + given={Super\bibnamedelimb Data\bibnamedelima Science}, + giveni={S\bibinitperiod\bibinitdelim D\bibinitperiod\bibinitdelim S\bibinitperiod}}}% + } + \list{organization}{1}{% + {Super Data Science}% + } + \strng{namehash}{2397822f564bc95b5d3e07a3cc633029} + \strng{fullhash}{2397822f564bc95b5d3e07a3cc633029} + \strng{bibnamehash}{2397822f564bc95b5d3e07a3cc633029} + \strng{authorbibnamehash}{2397822f564bc95b5d3e07a3cc633029} + 
\strng{authornamehash}{2397822f564bc95b5d3e07a3cc633029} + \strng{authorfullhash}{2397822f564bc95b5d3e07a3cc633029} + \field{sortinit}{3} + \field{sortinithash}{a4b52e5432884761f50fb9571273b93e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Recurrent Neural Networks (RNN) - The Vanishing Gradient Problem} + \field{year}{2018} + \verb{urlraw} + \verb https://www.superdatascience.com/blogs/recurrent-neural-networks-rnn-the-vanishing-gradient-problem + \endverb + \verb{url} + \verb https://www.superdatascience.com/blogs/recurrent-neural-networks-rnn-the-vanishing-gradient-problem + \endverb + \endentry + \entry{20}{inproceedings}{} + \name{author}{2}{}{% + {{hash=41b31e29fb2bdbf9f5c9c1b0d5b3e815}{% + family={Hochreiter}, + familyi={H\bibinitperiod}, + given={Sepp}, + giveni={S\bibinitperiod}}}% + {{hash=c3a004dc2b8b6fb4dd79c5b8c1469da7}{% + family={Schmidhuber}, + familyi={S\bibinitperiod}, + given={Jurgen}, + giveni={J\bibinitperiod}}}% + } + \list{organization}{1}{% + {MIT Press}% + } + \strng{namehash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \strng{fullhash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \strng{bibnamehash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \strng{authorbibnamehash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \strng{authornamehash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \strng{authorfullhash}{ed8d27cce26f5380dbdf235ccef1a4f6} + \field{sortinit}{3} + \field{sortinithash}{a4b52e5432884761f50fb9571273b93e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{booktitle}{Neural computation, Volume 9. 
8} + \field{title}{Long short-term memory} + \field{year}{1997} + \field{pages}{1735\bibrangedash 1780} + \range{pages}{46} + \verb{urlraw} + \verb https://www.bioinf.jku.at/publications/older/2604.pdf + \endverb + \verb{url} + \verb https://www.bioinf.jku.at/publications/older/2604.pdf + \endverb + \endentry + \entry{21}{inproceedings}{} + \name{author}{1}{}{% + {{hash=9979e7c606c1876cb09cf32b78628d71}{% + family={Yan}, + familyi={Y\bibinitperiod}, + given={Shi}, + giveni={S\bibinitperiod}}}% + } + \list{organization}{1}{% + {Medium}% + } + \strng{namehash}{9979e7c606c1876cb09cf32b78628d71} + \strng{fullhash}{9979e7c606c1876cb09cf32b78628d71} + \strng{bibnamehash}{9979e7c606c1876cb09cf32b78628d71} + \strng{authorbibnamehash}{9979e7c606c1876cb09cf32b78628d71} + \strng{authornamehash}{9979e7c606c1876cb09cf32b78628d71} + \strng{authorfullhash}{9979e7c606c1876cb09cf32b78628d71} + \field{sortinit}{3} + \field{sortinithash}{a4b52e5432884761f50fb9571273b93e} + \field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Understanding LSTM and its diagrams} + \field{year}{Mar 13, 2016} + \verb{urlraw} + \verb https://medium.com/mlreview/understanding-lstm-and-its-diagrams-37e2f46f1714 + \endverb + \verb{url} + \verb https://medium.com/mlreview/understanding-lstm-and-its-diagrams-37e2f46f1714 + \endverb + \endentry + \entry{22}{inproceedings}{} + \name{author}{1}{}{% + {{hash=dea937fb216bfbba25c1bfa1f36a63fe}{% + family={Olah}, + familyi={O\bibinitperiod}, + given={Christopher}, + giveni={C\bibinitperiod}}}% + } + \strng{namehash}{dea937fb216bfbba25c1bfa1f36a63fe} + \strng{fullhash}{dea937fb216bfbba25c1bfa1f36a63fe} + \strng{bibnamehash}{dea937fb216bfbba25c1bfa1f36a63fe} + \strng{authorbibnamehash}{dea937fb216bfbba25c1bfa1f36a63fe} + \strng{authornamehash}{dea937fb216bfbba25c1bfa1f36a63fe} + \strng{authorfullhash}{dea937fb216bfbba25c1bfa1f36a63fe} + \field{sortinit}{3} + \field{sortinithash}{a4b52e5432884761f50fb9571273b93e} + 
\field{labelnamesource}{author} + \field{labeltitlesource}{title} + \field{title}{Understanding LSTM Networks} + \field{year}{2015} + \verb{urlraw} + \verb https://colah.github.io/posts/2015-08-Understanding-LSTMs + \endverb + \verb{url} + \verb https://colah.github.io/posts/2015-08-Understanding-LSTMs + \endverb + \endentry \enddatalist \endrefsection \endinput diff --git a/document.bcf b/document.bcf index cbefbc4..ba3e87b 100644 --- a/document.bcf +++ b/document.bcf @@ -1,14 +1,14 @@ - + output_encoding - utf8 + ascii input_encoding - utf8 + ascii debug @@ -360,7 +360,6 @@ uniquework uniqueprimaryauthor defernumbers - locallabelwidth bibwarn useprefix defernums @@ -1563,19 +1562,9 @@ eprinttype eventday eventendday - eventendhour - eventendminute eventendmonth - eventendseason - eventendsecond - eventendtimezone eventendyear - eventhour - eventminute eventmonth - eventseason - eventsecond - eventtimezone eventyear eventtitle eventtitleaddon @@ -1607,19 +1596,9 @@ eprinttype eventday eventendday - eventendhour - eventendminute eventendmonth - eventendseason - eventendsecond - eventendtimezone eventendyear - eventhour - eventminute eventmonth - eventseason - eventsecond - eventtimezone eventyear eventtitle eventtitleaddon @@ -1661,19 +1640,9 @@ eprinttype eventday eventendday - eventendhour - eventendminute eventendmonth - eventendseason - eventendsecond - eventendtimezone eventendyear - eventhour - eventminute eventmonth - eventseason - eventsecond - eventtimezone eventyear eventtitle eventtitleaddon @@ -1747,24 +1716,6 @@ unpublished addendum author - eventday - eventendday - eventendhour - eventendminute - eventendmonth - eventendseason - eventendsecond - eventendtimezone - eventendyear - eventhour - eventminute - eventmonth - eventseason - eventsecond - eventtimezone - eventyear - eventtitle - eventtitleaddon howpublished language location @@ -1773,8 +1724,6 @@ subtitle title titleaddon - type - venue article @@ -1976,6 +1925,42 @@ 1 + 2 + 1 + 3 + 4 + 5 + 6 + 
7 + 6 + 1 + 2 + 4 + 8 + 9 + 10 + 10 + 10 + 11 + 10 + 10 + 10 + 12 + 13 + 14 + 13 + 15 + 16 + 17 + 18 + 19 + 20 + 20 + 21 + 21 + 20 + 21 + 22 * diff --git a/document.blg b/document.blg index 7cb6f5f..6805804 100644 --- a/document.blg +++ b/document.blg @@ -1,15 +1,20 @@ -[0] Config.pm:302> INFO - This is Biber 2.11 +[0] Config.pm:302> INFO - This is Biber 2.9 [0] Config.pm:305> INFO - Logfile is 'document.blg' -[30] biber:313> INFO - === Sat Mar 30, 2019, 16:24:19 -[51] Biber.pm:371> INFO - Reading 'document.bcf' -[125] Biber.pm:854> INFO - Using all citekeys in bib section 0 -[141] Biber.pm:3976> INFO - Processing section 0 -[152] Biber.pm:4136> INFO - Looking for bibtex format file 'report.bib' for section 0 -[153] bibtex.pm:1489> INFO - LaTeX decoding ... -[156] bibtex.pm:1305> INFO - Found BibTeX data source 'report.bib' -[176] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized' -[177] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable' -[177] Biber.pm:3804> INFO - Sorting list 'none/global//global/global' of type 'entry' with template 'none' and locale 'en-US' -[177] Biber.pm:3810> INFO - No sort tailoring available for locale 'en-US' -[182] bbl.pm:617> INFO - Writing 'document.bbl' with encoding 'UTF-8' -[183] bbl.pm:720> INFO - Output to document.bbl +[20] biber:313> INFO - === Sun Apr 21, 2019, 23:13:10 +[35] Biber.pm:371> INFO - Reading 'document.bcf' +[81] Biber.pm:854> INFO - Using all citekeys in bib section 0 +[90] Biber.pm:3981> INFO - Processing section 0 +[98] Biber.pm:4154> INFO - Looking for bibtex format file 'report.bib' for section 0 +[99] bibtex.pm:1468> INFO - LaTeX decoding ... +[118] bibtex.pm:1294> INFO - Found BibTeX data source 'report.bib' +[125] Utils.pm:169> WARN - year field 'Mar 13, 2016' in entry '21' is not an integer - this will probably not sort properly. 
+[179] Utils.pm:169> WARN - Name "Mairal, J., Ponce, J., Sapiro, G., Zisserman, A." has too many commas: skipping name +[180] Utils.pm:169> WARN - BibTeX subsystem: warning: comma(s) at end of name (removing) +[180] Utils.pm:169> WARN - BibTeX subsystem: author, warning: comma(s) at end of name (removing) +[194] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'normalization = NFD' with 'normalization = prenormalized' +[194] UCollate.pm:68> INFO - Overriding locale 'en-US' defaults 'variable = shifted' with 'variable = non-ignorable' +[194] Biber.pm:3809> INFO - Sorting list 'none/global//global/global' of type 'entry' with template 'none' and locale 'en-US' +[194] Biber.pm:3815> INFO - No sort tailoring available for locale 'en-US' +[208] bbl.pm:617> INFO - Writing 'document.bbl' with encoding 'ascii' +[216] bbl.pm:720> INFO - Output to document.bbl +[216] Biber.pm:110> INFO - WARNINGS: 4 diff --git a/document.log b/document.log index 96e985e..72b1719 100644 --- a/document.log +++ b/document.log @@ -1,23 +1,24 @@ -This is pdfTeX, Version 3.14159265-2.6-1.40.19 (TeX Live 2019/dev/Debian) (preloaded format=pdflatex 2019.2.14) 30 MAR 2019 16:24 +This is pdfTeX, Version 3.14159265-2.6-1.40.18 (TeX Live 2017/Debian) (preloaded format=pdflatex 2018.10.16) 21 APR 2019 23:14 entering extended mode restricted \write18 enabled. %&-line parsing enabled. **document.tex (./document.tex -LaTeX2e <2018-04-01> patch level 5 +LaTeX2e <2017-04-15> +Babel <3.18> and hyphenation patterns for 84 language(s) loaded. 
(/usr/share/texlive/texmf-dist/tex/latex/base/article.cls Document Class: article 2014/09/29 v1.4h Standard LaTeX document class (/usr/share/texlive/texmf-dist/tex/latex/base/size10.clo File: size10.clo 2014/09/29 v1.4h Standard LaTeX file (size option) ) -\c@part=\count80 -\c@section=\count81 -\c@subsection=\count82 -\c@subsubsection=\count83 -\c@paragraph=\count84 -\c@subparagraph=\count85 -\c@figure=\count86 -\c@table=\count87 +\c@part=\count79 +\c@section=\count80 +\c@subsection=\count81 +\c@subsubsection=\count82 +\c@paragraph=\count83 +\c@subparagraph=\count84 +\c@figure=\count85 +\c@table=\count86 \abovecaptionskip=\skip41 \belowcaptionskip=\skip42 \bibindent=\dimen102 @@ -42,13 +43,13 @@ Package: amsbsy 1999/11/29 v1.2d Bold Symbols (/usr/share/texlive/texmf-dist/tex/latex/amsmath/amsopn.sty Package: amsopn 2016/03/08 v2.02 operator names ) -\inf@bad=\count88 +\inf@bad=\count87 LaTeX Info: Redefining \frac on input line 213. -\uproot@=\count89 -\leftroot@=\count90 +\uproot@=\count88 +\leftroot@=\count89 LaTeX Info: Redefining \overline on input line 375. -\classnum@=\count91 -\DOTSCASE@=\count92 +\classnum@=\count90 +\DOTSCASE@=\count91 LaTeX Info: Redefining \ldots on input line 472. LaTeX Info: Redefining \dots on input line 475. LaTeX Info: Redefining \cdots on input line 596. @@ -57,15 +58,15 @@ LaTeX Info: Redefining \cdots on input line 596. \big@size=\dimen105 LaTeX Font Info: Redeclaring font encoding OML on input line 712. LaTeX Font Info: Redeclaring font encoding OMS on input line 713. 
-\macc@depth=\count93 -\c@MaxMatrixCols=\count94 +\macc@depth=\count92 +\c@MaxMatrixCols=\count93 \dotsspace@=\muskip10 -\c@parentequation=\count95 -\dspbrk@lvl=\count96 +\c@parentequation=\count94 +\dspbrk@lvl=\count95 \tag@help=\toks15 -\row@=\count97 -\column@=\count98 -\maxfields@=\count99 +\row@=\count96 +\column@=\count97 +\maxfields@=\count98 \andhelp@=\toks16 \eqnshift@=\dimen106 \alignsep@=\dimen107 @@ -88,24 +89,22 @@ LaTeX Font Info: Overwriting math alphabet `\mathfrak' in version `bold' (Font) U/euf/m/n --> U/euf/b/n on input line 106. ) (/usr/share/texlive/texmf-dist/tex/latex/multirow/multirow.sty -Package: multirow 2018/08/03 v2.3 Span multiple rows of a table +Package: multirow 2016/11/25 v2.2 Span multiple rows of a table \multirow@colwidth=\skip46 -\multirow@cntb=\count100 +\multirow@cntb=\count99 \multirow@dima=\skip47 \bigstrutjot=\dimen112 ) (/usr/share/texlive/texmf-dist/tex/latex/colortbl/colortbl.sty -Package: colortbl 2018/05/02 v1.0c Color table columns (DPC) +Package: colortbl 2012/02/13 v1.0a Color table columns (DPC) (/usr/share/texlive/texmf-dist/tex/latex/tools/array.sty -Package: array 2018/04/30 v2.4h Tabular extension package (FMi) +Package: array 2016/10/06 v2.4d Tabular extension package (FMi) \col@sep=\dimen113 -\ar@mcellbox=\box28 \extrarowheight=\dimen114 \NC@list=\toks19 \extratabsurround=\skip48 \backup@length=\skip49 -\ar@cellbox=\box29 ) (/usr/share/texlive/texmf-dist/tex/latex/graphics/color.sty Package: color 2016/07/10 v1.1e Standard LaTeX Color (DPC) @@ -129,7 +128,7 @@ File: color.cfg 2016/01/02 v1.6 sample color configuration ) Package xcolor Info: Driver file: pdftex.def on input line 225. LaTeX Info: Redefining \color on input line 709. -\rownum=\count101 +\rownum=\count100 Package xcolor Info: Model `cmy' substituted by `cmy0' on input line 1348. Package xcolor Info: Model `hsb' substituted by `rgb' on input line 1352. Package xcolor Info: Model `RGB' extended on input line 1364. 
@@ -148,11 +147,11 @@ Package: epigraph 2009/09/02 v1.5c typesetting epigraphs \epigraphrule=\skip54 ) (/usr/share/texlive/texmf-dist/tex/latex/caption/caption.sty -Package: caption 2018/05/01 v3.3-147 Customizing captions (AR) +Package: caption 2016/02/21 v3.3-144 Customizing captions (AR) (/usr/share/texlive/texmf-dist/tex/latex/caption/caption3.sty -Package: caption3 2018/05/27 v1.8a caption3 kernel (AR) -Package caption3 Info: TeX engine: e-TeX on input line 64. +Package: caption3 2016/05/22 v1.7-166 caption3 kernel (AR) +Package caption3 Info: TeX engine: e-TeX on input line 67. (/usr/share/texlive/texmf-dist/tex/latex/graphics/keyval.sty Package: keyval 2014/10/28 v1.15 key=value parser (DPC) @@ -166,30 +165,30 @@ Package: keyval 2014/10/28 v1.15 key=value parser (DPC) \caption@parindent=\dimen120 \caption@hangindent=\dimen121 ) -\c@ContinuedFloat=\count102 +\c@ContinuedFloat=\count101 ) (/usr/share/texlive/texmf-dist/tex/latex/caption/subcaption.sty -Package: subcaption 2018/05/01 v1.1-162 Sub-captions (AR) -\c@subfigure=\count103 -\c@subtable=\count104 +Package: subcaption 2016/05/22 v1.1-161 Sub-captions (AR) +\c@subfigure=\count102 +\c@subtable=\count103 ) (/usr/share/texlive/texmf-dist/tex/latex/tools/tabularx.sty -Package: tabularx 2016/02/03 v2.11b `tabularx' package (DPC) +Package: tabularx 2016/02/03 v2.11 `tabularx' package (DPC) \TX@col@width=\dimen122 \TX@old@table=\dimen123 \TX@old@col=\dimen124 \TX@target=\dimen125 \TX@delta=\dimen126 -\TX@cols=\count105 +\TX@cols=\count104 \TX@ftn=\toks22 ) (/usr/share/texlive/texmf-dist/tex/latex/float/float.sty Package: float 2001/11/08 v1.3d Float enhancements (AL) -\c@float@type=\count106 +\c@float@type=\count105 \float@exts=\toks23 -\float@box=\box30 +\float@box=\box28 \@float@everytoks=\toks24 -\@floatcapt=\box31 +\@floatcapt=\box29 ) (/usr/share/texlive/texmf-dist/tex/latex/tools/longtable.sty Package: longtable 2014/10/28 v4.11 Multi-page Table package (DPC) @@ -197,16 +196,16 @@ Package: longtable 
2014/10/28 v4.11 Multi-page Table package (DPC) \LTright=\skip56 \LTpre=\skip57 \LTpost=\skip58 -\LTchunksize=\count107 +\LTchunksize=\count106 \LTcapwidth=\dimen127 -\LT@head=\box32 -\LT@firsthead=\box33 -\LT@foot=\box34 -\LT@lastfoot=\box35 -\LT@cols=\count108 -\LT@rows=\count109 -\c@LT@tables=\count110 -\c@LT@chunks=\count111 +\LT@head=\box30 +\LT@firsthead=\box31 +\LT@foot=\box32 +\LT@lastfoot=\box33 +\LT@cols=\count107 +\LT@rows=\count108 +\c@LT@tables=\count109 +\c@LT@chunks=\count110 \LT@p@ftn=\toks25 ) (/usr/share/texlive/texmf-dist/tex/latex/graphics/graphicx.sty @@ -233,20 +232,20 @@ Package: pdfpages 2017/10/31 v0.5l Insert pages of external PDF documents (AM) Package: ifthen 2014/09/29 v1.1c Standard LaTeX ifthen package (DPC) ) (/usr/share/texlive/texmf-dist/tex/latex/tools/calc.sty -Package: calc 2017/05/25 v4.3 Infix arithmetic (KKT,FJ) -\calc@Acount=\count112 -\calc@Bcount=\count113 +Package: calc 2014/10/28 v4.3 Infix arithmetic (KKT,FJ) +\calc@Acount=\count111 +\calc@Bcount=\count112 \calc@Adimen=\dimen130 \calc@Bdimen=\dimen131 \calc@Askip=\skip59 \calc@Bskip=\skip60 LaTeX Info: Redefining \setlength on input line 80. LaTeX Info: Redefining \addtolength on input line 81. 
-\calc@Ccount=\count114 +\calc@Ccount=\count113 \calc@Cskip=\skip61 ) (/usr/share/texlive/texmf-dist/tex/latex/eso-pic/eso-pic.sty -Package: eso-pic 2018/04/12 v2.0h eso-pic (RN) +Package: eso-pic 2015/07/21 v2.0g eso-pic (RN) (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/atbegshi.sty Package: atbegshi 2016/06/09 v1.18 At begin shipout hook (HO) @@ -266,11 +265,11 @@ Package: ifpdf 2017/03/15 v3.2 Provides the ifpdf switch (/usr/share/texlive/texmf-dist/tex/latex/pdfpages/pppdftex.def File: pppdftex.def 2017/10/31 v0.5l Pdfpages driver for pdfTeX (AM) ) -\AM@pagebox=\box36 +\AM@pagebox=\box34 \AM@global@opts=\toks26 \AM@toc@title=\toks27 -\c@AM@survey=\count115 -\AM@templatesizebox=\box37 +\c@AM@survey=\count114 +\AM@templatesizebox=\box35 ) (/usr/share/texlive/texmf-dist/tex/latex/oberdiek/pdflscape.sty Package: pdflscape 2016/05/14 v0.11 Display of landscape pages in PDF (HO) @@ -284,7 +283,7 @@ Package pdflscape Info: Auto-detected driver: pdftex on input line 81. Package: ifxetex 2010/09/12 v0.6 Provides ifxetex conditional )) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/base/glossaries.sty -Package: glossaries 2018/07/23 v4.41 (NLCT) +Package: glossaries 2017/11/14 v4.35 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/xkeyval/xkeyval.sty Package: xkeyval 2014/12/03 v2.7a package option processing (HA) @@ -294,15 +293,15 @@ Package: xkeyval 2014/12/03 v2.7a package option processing (HA) \XKV@toks=\toks28 \XKV@tempa@toks=\toks29 ) -\XKV@depth=\count116 +\XKV@depth=\count115 File: xkeyval.tex 2014/12/03 v2.7a key=value parser (HA) )) (/usr/share/texlive/texmf-dist/tex/latex/mfirstuc/mfirstuc.sty Package: mfirstuc 2017/11/14 v2.06 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/etoolbox/etoolbox.sty -Package: etoolbox 2018/08/19 v2.5f e-TeX tools for LaTeX (JAW) -\etb@tempcnta=\count117 +Package: etoolbox 2018/02/11 v2.5e e-TeX tools for LaTeX (JAW) +\etb@tempcnta=\count116 ) \@glsmfirst=\toks30 \@glsmrest=\toks31 @@ -314,14 +313,14 @@ Package: 
textcase 2004/10/07 v0.07 Text only upper/lower case changing (DPC) Package: xfor 2009/02/05 v1.05 (NLCT) ) (/usr/share/texlive/texmf-dist/tex/latex/datatool/datatool-base.sty -Package: datatool-base 2018/04/16 v2.30 (NLCT) +Package: datatool-base 2017/11/12 v2.29 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/substr/substr.sty Package: substr 2009/10/20 v1.2 Handle substrings -\c@su@anzahl=\count118 +\c@su@anzahl=\count117 ) (/usr/share/texlive/texmf-dist/tex/latex/datatool/datatool-fp.sty -Package: datatool-fp 2018/04/16 v2.30 (NLCT) +Package: datatool-fp 2017/11/12 v2.29 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/fp/fp.sty Package: fp 1995/04/02 @@ -329,19 +328,19 @@ Package: fp 1995/04/02 `Fixed Point Package', Version 0.8, April 2, 1995 (C) Michael Mehlich (/usr/share/texlive/texmf-dist/tex/latex/fp/defpattern.sty Package: defpattern 1994/10/12 -\actioncount=\count119 +\actioncount=\count118 ) (/usr/share/texlive/texmf-dist/tex/latex/fp/fp-basic.sty Package: fp-basic 1996/05/13 -\FP@xs=\count120 -\FP@xia=\count121 -\FP@xib=\count122 -\FP@xfa=\count123 -\FP@xfb=\count124 -\FP@rega=\count125 -\FP@regb=\count126 -\FP@regs=\count127 -\FP@times=\count128 +\FP@xs=\count119 +\FP@xia=\count120 +\FP@xib=\count121 +\FP@xfa=\count122 +\FP@xfb=\count123 +\FP@rega=\count124 +\FP@regb=\count125 +\FP@regs=\count126 +\FP@times=\count127 ) (/usr/share/texlive/texmf-dist/tex/latex/fp/fp-addons.sty Package: fp-addons 1995/03/15 @@ -360,7 +359,7 @@ Package: fp-pas 1994/08/29 ) (/usr/share/texlive/texmf-dist/tex/latex/fp/fp-random.sty Package: fp-random 1995/02/23 -\FPseed=\count129 +\FPseed=\count128 ) (/usr/share/texlive/texmf-dist/tex/latex/fp/fp-eqn.sty Package: fp-eqn 1995/04/03 @@ -372,20 +371,20 @@ Package: fp-upn 1996/10/21 Package: fp-eval 1995/04/03 ))) \@dtl@toks=\toks32 -\@dtl@tmpcount=\count130 +\@dtl@tmpcount=\count129 \dtl@tmplength=\skip62 -\dtl@sortresult=\count131 -\@dtl@numgrpsepcount=\count132 -\@dtl@datatype=\count133 -\dtl@codeA=\count134 
-\dtl@codeB=\count135 -\@dtl@foreach@level=\count136 +\dtl@sortresult=\count130 +\@dtl@numgrpsepcount=\count131 +\@dtl@datatype=\count132 +\dtl@codeA=\count133 +\dtl@codeB=\count134 +\@dtl@foreach@level=\count135 ) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/base/glossaries-compatible- 307.sty -Package: glossaries-compatible-307 2018/07/23 v4.41 (NLCT) +Package: glossaries-compatible-307 2017/11/14 v4.35 (NLCT) ) -\gls@level=\count137 +\gls@level=\count136 \@gls@tmpb=\toks33 \gls@tmplen=\skip63 \glskeylisttok=\toks34 @@ -395,20 +394,20 @@ Package: glossaries-compatible-307 2018/07/23 v4.41 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/styles/glossary-hypernav.st y -Package: glossary-hypernav 2018/07/23 v4.41 (NLCT) +Package: glossary-hypernav 2017/11/14 v4.35 (NLCT) ) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/styles/glossary-list.sty -Package: glossary-list 2018/07/23 v4.41 (NLCT) +Package: glossary-list 2017/11/14 v4.35 (NLCT) \glslistdottedwidth=\skip64 ) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/styles/glossary-long.sty -Package: glossary-long 2018/07/23 v4.41 (NLCT) +Package: glossary-long 2017/11/14 v4.35 (NLCT) \glsdescwidth=\skip65 \glspagelistwidth=\skip66 ) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/styles/glossary-super.sty -Package: glossary-super 2018/07/23 v4.41 (NLCT) +Package: glossary-super 2017/11/14 v4.35 (NLCT) (/usr/share/texlive/texmf-dist/tex/latex/supertabular/supertabular.sty Package: supertabular 2004/02/20 v4.1e the supertabular environment -\c@tracingst=\count138 +\c@tracingst=\count137 \ST@wd=\dimen134 \ST@rightskip=\skip67 \ST@leftskip=\skip68 @@ -423,22 +422,22 @@ Package: supertabular 2004/02/20 v4.1e the supertabular environment \ST@prevht=\dimen142 \ST@toadd=\dimen143 \ST@dimen=\dimen144 -\ST@pbox=\box38 +\ST@pbox=\box36 )) (/usr/share/texlive/texmf-dist/tex/latex/glossaries/styles/glossary-tree.sty -Package: glossary-tree 2018/07/23 v4.41 (NLCT) +Package: glossary-tree 2017/11/14 
v4.35 (NLCT) \glstreeindent=\skip70 )) (/usr/share/texlive/texmf-dist/tex/latex/geometry/geometry.sty -Package: geometry 2018/04/16 v5.8 Page Geometry +Package: geometry 2010/09/12 v5.6 Page Geometry (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/ifvtex.sty Package: ifvtex 2016/05/16 v1.6 Detect VTeX and its facilities (HO) Package ifvtex Info: VTeX not detected. ) -\Gm@cnth=\count139 -\Gm@cntv=\count140 -\c@Gm@tempcnt=\count141 +\Gm@cnth=\count138 +\Gm@cntv=\count139 +\c@Gm@tempcnt=\count140 \Gm@bindingoffset=\dimen145 \Gm@wd@mp=\dimen146 \Gm@odd@mp=\dimen147 @@ -449,11 +448,17 @@ Package ifvtex Info: VTeX not detected. \Gm@layoutvoffset=\dimen152 \Gm@dimlist=\toks38 ) +(/usr/share/texlive/texmf-dist/tex/latex/titling/titling.sty +Package: titling 2009/09/04 v2.1d maketitle typesetting +\thanksmarkwidth=\skip71 +\thanksmargin=\skip72 +\droptitle=\skip73 +) (/usr/share/texlive/texmf-dist/tex/latex/biblatex/biblatex.sty -Package: biblatex 2018/03/04 v3.11 programmable bibliographies (PK/MW) +Package: biblatex 2017/12/19 v3.10 programmable bibliographies (PK/JW/AB) (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/pdftexcmds.sty -Package: pdftexcmds 2018/01/30 v0.27 Utility functions of pdfTeX for LuaTeX (HO +Package: pdftexcmds 2018/01/21 v0.26 Utility functions of pdfTeX for LuaTeX (HO ) (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/ifluatex.sty @@ -473,10 +478,14 @@ Package: kvsetkeys 2016/05/16 v1.17 Key value parser (HO) (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/etexcmds.sty Package: etexcmds 2016/05/16 v1.6 Avoid name clashes with e-TeX commands (HO) +Package etexcmds Info: Could not find \expanded. +(etexcmds) That can mean that you are not using pdfTeX 1.50 or +(etexcmds) that some package has redefined \expanded. +(etexcmds) In the latter case, load this package earlier. 
))) (/usr/share/texlive/texmf-dist/tex/latex/logreq/logreq.sty Package: logreq 2010/08/04 v1.0 xml request logger -\lrq@indent=\count142 +\lrq@indent=\count141 (/usr/share/texlive/texmf-dist/tex/latex/logreq/logreq.def File: logreq.def 2010/08/04 v1.0 logreq spec v1.0 @@ -488,208 +497,198 @@ Package: url 2013/09/16 ver 3.4 Verb mode for urls, etc. (/usr/share/texlive/texmf-dist/tex/generic/xstring/xstring.sty (/usr/share/texlive/texmf-dist/tex/generic/xstring/xstring.tex \@xs@message=\write3 -\integerpart=\count143 -\decimalpart=\count144 +\integerpart=\count142 +\decimalpart=\count143 ) Package: xstring 2013/10/13 v1.7c String manipulations (C Tellechea) ) -\c@tabx@nest=\count145 -\c@listtotal=\count146 -\c@listcount=\count147 -\c@liststart=\count148 -\c@liststop=\count149 -\c@citecount=\count150 -\c@citetotal=\count151 -\c@multicitecount=\count152 -\c@multicitetotal=\count153 -\c@instcount=\count154 -\c@maxnames=\count155 -\c@minnames=\count156 -\c@maxitems=\count157 -\c@minitems=\count158 -\c@citecounter=\count159 -\c@savedcitecounter=\count160 -\c@uniquelist=\count161 -\c@uniquename=\count162 -\c@refsection=\count163 -\c@refsegment=\count164 -\c@maxextratitle=\count165 -\c@maxextratitleyear=\count166 -\c@maxextradate=\count167 -\c@maxextraalpha=\count168 -\c@abbrvpenalty=\count169 -\c@highnamepenalty=\count170 -\c@lownamepenalty=\count171 -\c@maxparens=\count172 -\c@parenlevel=\count173 -\blx@tempcnta=\count174 -\blx@tempcntb=\count175 -\blx@tempcntc=\count176 -\blx@maxsection=\count177 -\blx@maxsegment@0=\count178 -\blx@notetype=\count179 -\blx@parenlevel@text=\count180 -\blx@parenlevel@foot=\count181 -\blx@sectionciteorder@0=\count182 -\labelnumberwidth=\skip71 -\labelalphawidth=\skip72 -\biblabelsep=\skip73 -\bibitemsep=\skip74 -\bibnamesep=\skip75 -\bibinitsep=\skip76 -\bibparsep=\skip77 -\bibhang=\skip78 +\c@tabx@nest=\count144 +\c@listtotal=\count145 +\c@listcount=\count146 +\c@liststart=\count147 +\c@liststop=\count148 +\c@citecount=\count149 
+\c@citetotal=\count150 +\c@multicitecount=\count151 +\c@multicitetotal=\count152 +\c@instcount=\count153 +\c@maxnames=\count154 +\c@minnames=\count155 +\c@maxitems=\count156 +\c@minitems=\count157 +\c@citecounter=\count158 +\c@savedcitecounter=\count159 +\c@uniquelist=\count160 +\c@uniquename=\count161 +\c@refsection=\count162 +\c@refsegment=\count163 +\c@maxextratitle=\count164 +\c@maxextratitleyear=\count165 +\c@maxextradate=\count166 +\c@maxextraalpha=\count167 +\c@abbrvpenalty=\count168 +\c@highnamepenalty=\count169 +\c@lownamepenalty=\count170 +\c@maxparens=\count171 +\c@parenlevel=\count172 +\blx@tempcnta=\count173 +\blx@tempcntb=\count174 +\blx@tempcntc=\count175 +\blx@maxsection=\count176 +\blx@maxsegment@0=\count177 +\blx@notetype=\count178 +\blx@parenlevel@text=\count179 +\blx@parenlevel@foot=\count180 +\blx@sectionciteorder@0=\count181 +\labelnumberwidth=\skip74 +\labelalphawidth=\skip75 +\biblabelsep=\skip76 +\bibitemsep=\skip77 +\bibnamesep=\skip78 +\bibinitsep=\skip79 +\bibparsep=\skip80 +\bibhang=\skip81 \blx@bcfin=\read1 \blx@bcfout=\write4 -\c@mincomprange=\count183 -\c@maxcomprange=\count184 -\c@mincompwidth=\count185 +\c@mincomprange=\count182 +\c@maxcomprange=\count183 +\c@mincompwidth=\count184 Package biblatex Info: Trying to load biblatex default data model... Package biblatex Info: ... file 'blx-dm.def' found. -(/usr/share/texlive/texmf-dist/tex/latex/biblatex/blx-dm.def -File: blx-dm.def 2018/03/04 v3.11 biblatex localization (PK/MW) -) +(/usr/share/texlive/texmf-dist/tex/latex/biblatex/blx-dm.def) Package biblatex Info: Trying to load biblatex style data model... Package biblatex Info: ... file 'ieee.dbx' not found. Package biblatex Info: Trying to load biblatex custom data model... Package biblatex Info: ... file 'biblatex-dm.cfg' not found. 
-\c@afterword=\count186 -\c@savedafterword=\count187 -\c@annotator=\count188 -\c@savedannotator=\count189 -\c@author=\count190 -\c@savedauthor=\count191 -\c@bookauthor=\count192 -\c@savedbookauthor=\count193 -\c@commentator=\count194 -\c@savedcommentator=\count195 -\c@editor=\count196 -\c@savededitor=\count197 -\c@editora=\count198 -\c@savededitora=\count199 -\c@editorb=\count266 -\c@savededitorb=\count267 -\c@editorc=\count268 -\c@savededitorc=\count269 -\c@foreword=\count270 -\c@savedforeword=\count271 -\c@holder=\count272 -\c@savedholder=\count273 -\c@introduction=\count274 -\c@savedintroduction=\count275 -\c@namea=\count276 -\c@savednamea=\count277 -\c@nameb=\count278 -\c@savednameb=\count279 -\c@namec=\count280 -\c@savednamec=\count281 -\c@translator=\count282 -\c@savedtranslator=\count283 -\c@shortauthor=\count284 -\c@savedshortauthor=\count285 -\c@shorteditor=\count286 -\c@savedshorteditor=\count287 -\c@labelname=\count288 -\c@savedlabelname=\count289 -\c@institution=\count290 -\c@savedinstitution=\count291 -\c@lista=\count292 -\c@savedlista=\count293 -\c@listb=\count294 -\c@savedlistb=\count295 -\c@listc=\count296 -\c@savedlistc=\count297 -\c@listd=\count298 -\c@savedlistd=\count299 -\c@liste=\count300 -\c@savedliste=\count301 -\c@listf=\count302 -\c@savedlistf=\count303 -\c@location=\count304 -\c@savedlocation=\count305 -\c@organization=\count306 -\c@savedorganization=\count307 -\c@origlocation=\count308 -\c@savedoriglocation=\count309 -\c@origpublisher=\count310 -\c@savedorigpublisher=\count311 -\c@publisher=\count312 -\c@savedpublisher=\count313 -\c@language=\count314 -\c@savedlanguage=\count315 -\c@origlanguage=\count316 -\c@savedoriglanguage=\count317 -\c@pageref=\count318 -\c@savedpageref=\count319 -\shorthandwidth=\skip79 -\shortjournalwidth=\skip80 -\shortserieswidth=\skip81 -\shorttitlewidth=\skip82 -\shortauthorwidth=\skip83 -\shorteditorwidth=\skip84 -\locallabelnumberwidth=\skip85 -\locallabelalphawidth=\skip86 -\localshorthandwidth=\skip87 
-\localshortjournalwidth=\skip88 -\localshortserieswidth=\skip89 -\localshorttitlewidth=\skip90 -\localshortauthorwidth=\skip91 -\localshorteditorwidth=\skip92 +\c@afterword=\count185 +\c@savedafterword=\count186 +\c@annotator=\count187 +\c@savedannotator=\count188 +\c@author=\count189 +\c@savedauthor=\count190 +\c@bookauthor=\count191 +\c@savedbookauthor=\count192 +\c@commentator=\count193 +\c@savedcommentator=\count194 +\c@editor=\count195 +\c@savededitor=\count196 +\c@editora=\count197 +\c@savededitora=\count198 +\c@editorb=\count199 +\c@savededitorb=\count266 +\c@editorc=\count267 +\c@savededitorc=\count268 +\c@foreword=\count269 +\c@savedforeword=\count270 +\c@holder=\count271 +\c@savedholder=\count272 +\c@introduction=\count273 +\c@savedintroduction=\count274 +\c@namea=\count275 +\c@savednamea=\count276 +\c@nameb=\count277 +\c@savednameb=\count278 +\c@namec=\count279 +\c@savednamec=\count280 +\c@translator=\count281 +\c@savedtranslator=\count282 +\c@shortauthor=\count283 +\c@savedshortauthor=\count284 +\c@shorteditor=\count285 +\c@savedshorteditor=\count286 +\c@labelname=\count287 +\c@savedlabelname=\count288 +\c@institution=\count289 +\c@savedinstitution=\count290 +\c@lista=\count291 +\c@savedlista=\count292 +\c@listb=\count293 +\c@savedlistb=\count294 +\c@listc=\count295 +\c@savedlistc=\count296 +\c@listd=\count297 +\c@savedlistd=\count298 +\c@liste=\count299 +\c@savedliste=\count300 +\c@listf=\count301 +\c@savedlistf=\count302 +\c@location=\count303 +\c@savedlocation=\count304 +\c@organization=\count305 +\c@savedorganization=\count306 +\c@origlocation=\count307 +\c@savedoriglocation=\count308 +\c@origpublisher=\count309 +\c@savedorigpublisher=\count310 +\c@publisher=\count311 +\c@savedpublisher=\count312 +\c@language=\count313 +\c@savedlanguage=\count314 +\c@origlanguage=\count315 +\c@savedoriglanguage=\count316 +\c@pageref=\count317 +\c@savedpageref=\count318 +\shorthandwidth=\skip82 +\shortjournalwidth=\skip83 +\shortserieswidth=\skip84 
+\shorttitlewidth=\skip85 +\shortauthorwidth=\skip86 +\shorteditorwidth=\skip87 Package biblatex Info: Trying to load compatibility code... Package biblatex Info: ... file 'blx-compat.def' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex/blx-compat.def -File: blx-compat.def 2018/03/04 v3.11 biblatex compatibility (PK/MW) +File: blx-compat.def 2017/12/19 v3.10 biblatex compatibility (PK/JW/AB) ) Package biblatex Info: Trying to load generic definitions... Package biblatex Info: ... file 'biblatex.def' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex/biblatex.def -File: biblatex.def 2018/03/04 v3.11 biblatex compatibility (PK/MW) -\c@textcitecount=\count320 -\c@textcitetotal=\count321 -\c@textcitemaxnames=\count322 -\c@biburlnumpenalty=\count323 -\c@biburlucpenalty=\count324 -\c@biburllcpenalty=\count325 -\c@smartand=\count326 +File: biblatex.def 2017/12/19 v3.10 biblatex compatibility (PK/JW/AB) +\c@textcitecount=\count319 +\c@textcitetotal=\count320 +\c@textcitemaxnames=\count321 +\c@biburlnumpenalty=\count322 +\c@biburlucpenalty=\count323 +\c@biburllcpenalty=\count324 +\c@smartand=\count325 ) Package biblatex Info: Trying to load bibliography style 'ieee'... Package biblatex Info: ... file 'ieee.bbx' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex-ieee/ieee.bbx -File: ieee.bbx 2018/08/20 v1.3 biblatex bibliography style +File: ieee.bbx 2017/03/27 v1.2d biblatex bibliography style Package biblatex Info: Trying to load bibliography style 'numeric-comp'... Package biblatex Info: ... file 'numeric-comp.bbx' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex/bbx/numeric-comp.bbx -File: numeric-comp.bbx 2018/03/04 v3.11 biblatex bibliography style (PK/MW) +File: numeric-comp.bbx 2017/12/19 v3.10 biblatex bibliography style (PK/JW/AB) Package biblatex Info: Trying to load bibliography style 'numeric'... Package biblatex Info: ... file 'numeric.bbx' found. 
(/usr/share/texlive/texmf-dist/tex/latex/biblatex/bbx/numeric.bbx -File: numeric.bbx 2018/03/04 v3.11 biblatex bibliography style (PK/MW) +File: numeric.bbx 2017/12/19 v3.10 biblatex bibliography style (PK/JW/AB) Package biblatex Info: Trying to load bibliography style 'standard'... Package biblatex Info: ... file 'standard.bbx' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex/bbx/standard.bbx -File: standard.bbx 2018/03/04 v3.11 biblatex bibliography style (PK/MW) -\c@bbx:relatedcount=\count327 -\c@bbx:relatedtotal=\count328 +File: standard.bbx 2017/12/19 v3.10 biblatex bibliography style (PK/JW/AB) +\c@bbx:relatedcount=\count326 +\c@bbx:relatedtotal=\count327 )))) Package biblatex Info: Trying to load citation style 'ieee'... Package biblatex Info: ... file 'ieee.cbx' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex-ieee/ieee.cbx -File: ieee.cbx 2018/08/20 v1.3 biblatex citation style +File: ieee.cbx 2017/03/27 v1.2d biblatex citation style Package biblatex Info: Trying to load citation style 'numeric-comp'... Package biblatex Info: ... file 'numeric-comp.cbx' found. (/usr/share/texlive/texmf-dist/tex/latex/biblatex/cbx/numeric-comp.cbx -File: numeric-comp.cbx 2018/03/04 v3.11 biblatex citation style (PK/MW) -\c@cbx@tempcnta=\count329 -\c@cbx@tempcntb=\count330 +File: numeric-comp.cbx 2017/12/19 v3.10 biblatex citation style (PK/JW/AB) +\c@cbx@tempcnta=\count328 +\c@cbx@tempcntb=\count329 Package biblatex Info: Redefining '\cite'. Package biblatex Info: Redefining '\parencite'. Package biblatex Info: Redefining '\footcite'. 
@@ -750,15 +749,13 @@ Package: hycolor 2016/05/16 v1.8 Color options for hyperref/bookmark (HO) Package: auxhook 2016/05/16 v1.4 Hooks for auxiliary files (HO) ) \@linkdim=\dimen153 -\Hy@linkcounter=\count331 -\Hy@pagecounter=\count332 +\Hy@linkcounter=\count330 +\Hy@pagecounter=\count331 (/usr/share/texlive/texmf-dist/tex/latex/hyperref/pd1enc.def File: pd1enc.def 2018/02/06 v6.86b Hyperref: PDFDocEncoding definition (HO) -Now handling font encoding PD1 ... -... no UTF-8 mapping file for font encoding PD1 ) -\Hy@SavedSpaceFactor=\count333 +\Hy@SavedSpaceFactor=\count332 (/usr/share/texlive/texmf-dist/tex/latex/latexconfig/hyperref.cfg File: hyperref.cfg 2002/06/06 v1.2 hyperref configuration of TeXLive @@ -767,8 +764,6 @@ Package hyperref Info: Option `unicode' set `true' on input line 4383. (/usr/share/texlive/texmf-dist/tex/latex/hyperref/puenc.def File: puenc.def 2018/02/06 v6.86b Hyperref: PDF Unicode definition (HO) -Now handling font encoding PU ... -... no UTF-8 mapping file for font encoding PU ) Package hyperref Info: Hyper figures OFF on input line 4509. Package hyperref Info: Link nesting OFF on input line 4514. @@ -777,10 +772,10 @@ Package hyperref Info: Plain pages OFF on input line 4524. Package hyperref Info: Backreferencing OFF on input line 4529. Package hyperref Info: Implicit mode ON; LaTeX internals redefined. Package hyperref Info: Bookmarks ON on input line 4762. -\c@Hy@tempcnt=\count334 +\c@Hy@tempcnt=\count333 LaTeX Info: Redefining \url on input line 5115. \XeTeXLinkMargin=\dimen154 -\Fld@menulength=\count335 +\Fld@menulength=\count334 \Field@Width=\dimen155 \Fld@charsize=\dimen156 Package hyperref Info: Hyper figures OFF on input line 6369. @@ -792,68 +787,65 @@ Package hyperref Info: Link coloring with OCG OFF on input line 6394. Package hyperref Info: PDF/A mode OFF on input line 6399. LaTeX Info: Redefining \ref on input line 6439. LaTeX Info: Redefining \pageref on input line 6443. 
-\Hy@abspage=\count336 -\c@Item=\count337 -\c@Hfootnote=\count338 +\Hy@abspage=\count335 +\c@Item=\count336 +\c@Hfootnote=\count337 ) Package hyperref Info: Driver (autodetected): hpdftex. (/usr/share/texlive/texmf-dist/tex/latex/hyperref/hpdftex.def File: hpdftex.def 2018/02/06 v6.86b Hyperref driver for pdfTeX -\Fld@listcount=\count339 -\c@bookmark@seq@number=\count340 +\Fld@listcount=\count338 +\c@bookmark@seq@number=\count339 (/usr/share/texlive/texmf-dist/tex/latex/oberdiek/rerunfilecheck.sty Package: rerunfilecheck 2016/05/16 v1.8 Rerun checks for auxiliary files (HO) Package uniquecounter Info: New unique counter `rerunfilecheck' on input line 2 82. ) -\Hy@SectionHShift=\skip93 +\Hy@SectionHShift=\skip88 ) - -Package biblatex Warning: File 'english-ieee.lbx' not found! -(biblatex) Ignoring mapping 'english' -> 'english-ieee'. - Package biblatex Info: Trying to load language 'english'... Package biblatex Info: ... file 'english.lbx' found. + (/usr/share/texlive/texmf-dist/tex/latex/biblatex/lbx/english.lbx -File: english.lbx 2018/03/04 v3.11 biblatex localization (PK/MW) +File: english.lbx 2017/12/19 v3.10 biblatex localization (PK/JW/AB) ) -\@quotelevel=\count341 -\@quotereset=\count342 +\@quotelevel=\count340 +\@quotereset=\count341 (./document.aux) \openout1 = `document.aux'. -LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. 
-LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. -LaTeX Font Info: Checking defaults for PU/pdf/m/n on input line 47. -LaTeX Font Info: ... okay on input line 47. +LaTeX Font Info: Checking defaults for OML/cmm/m/it on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for T1/cmr/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for OT1/cmr/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for OMS/cmsy/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for OMX/cmex/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for U/cmr/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for PD1/pdf/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. +LaTeX Font Info: Checking defaults for PU/pdf/m/n on input line 63. +LaTeX Font Info: ... okay on input line 63. (/usr/share/texlive/texmf-dist/tex/context/base/mkii/supp-pdf.mkii [Loading MPS to PDF converter (version 2006.09.02).] 
-\scratchcounter=\count343 +\scratchcounter=\count342 \scratchdimen=\dimen157 -\scratchbox=\box39 -\nofMPsegments=\count344 -\nofMParguments=\count345 +\scratchbox=\box37 +\nofMPsegments=\count343 +\nofMParguments=\count344 \everyMPshowfont=\toks39 -\MPscratchCnt=\count346 +\MPscratchCnt=\count345 \MPscratchDim=\dimen158 -\MPnumerator=\count347 -\makeMPintoPDFobject=\count348 +\MPnumerator=\count346 +\makeMPintoPDFobject=\count347 \everyMPtoPDFconversion=\toks40 ) (/usr/share/texlive/texmf-dist/tex/latex/oberdiek/epstopdf-base.sty Package: epstopdf-base 2016/05/15 v2.6 Base part for package epstopdf @@ -882,7 +874,7 @@ Package: ltcaption 2013/06/09 v1.4-94 longtable captions (AR) ) Package caption Info: supertabular package is loaded. Package caption Info: End \AtBeginDocument code. -\AtBeginShipoutBox=\box40 +\AtBeginShipoutBox=\box38 *geometry* driver: auto-detecting *geometry* detected driver: pdftex @@ -918,58 +910,116 @@ Package caption Info: End \AtBeginDocument code. * \@reversemarginfalse * (1in=72.27pt=25.4mm, 1cm=28.453pt) -Package biblatex Info: Input encoding 'utf8' detected. +Package biblatex Info: No input encoding detected. +(biblatex) Assuming 'ascii'. Package biblatex Info: Automatic encoding selection. -(biblatex) Assuming data encoding 'utf8'. +(biblatex) Assuming data encoding 'ascii'. \openout4 = `document.bcf'. Package biblatex Info: Trying to load bibliographic data... Package biblatex Info: ... file 'document.bbl' found. -(./document.bbl) -Package biblatex Info: Reference section=0 on input line 47. -Package biblatex Info: Reference segment=0 on input line 47. -Package hyperref Info: Link coloring OFF on input line 47. +(./document.bbl + +Package biblatex Warning: Biber reported the following issues +(biblatex) with '14': +(biblatex) - Name "Mairal, J., Ponce, J., Sapiro, G., Zisserman, + A." has too many commas: skipping name. + +) +Package biblatex Info: Reference section=0 on input line 63. 
+Package biblatex Info: Reference segment=0 on input line 63. +Package hyperref Info: Link coloring OFF on input line 63. (/usr/share/texlive/texmf-dist/tex/latex/hyperref/nameref.sty Package: nameref 2016/05/21 v2.44 Cross-referencing by name of section (/usr/share/texlive/texmf-dist/tex/generic/oberdiek/gettitlestring.sty Package: gettitlestring 2016/05/16 v1.5 Cleanup title references (HO) ) -\c@section@level=\count349 +\c@section@level=\count348 ) -LaTeX Info: Redefining \ref on input line 47. -LaTeX Info: Redefining \pageref on input line 47. -LaTeX Info: Redefining \nameref on input line 47. +LaTeX Info: Redefining \ref on input line 63. +LaTeX Info: Redefining \pageref on input line 63. +LaTeX Info: Redefining \nameref on input line 63. (./document.out) (./document.out) \@outlinefile=\write5 \openout5 = `document.out'. -LaTeX Font Info: Try loading font information for U+msa on input line 50. + +File: images/reading_logo.png Graphic file (type png) + +Package pdftex.def Info: images/reading_logo.png used on input line 66. +(pdftex.def) Requested size: 170.71916pt x 56.9057pt. +LaTeX Font Info: Try loading font information for U+msa on input line 66. (/usr/share/texlive/texmf-dist/tex/latex/amsfonts/umsa.fd File: umsa.fd 2013/01/14 v3.01 AMS symbols A ) -LaTeX Font Info: Try loading font information for U+msb on input line 50. +LaTeX Font Info: Try loading font information for U+msb on input line 66. (/usr/share/texlive/texmf-dist/tex/latex/amsfonts/umsb.fd File: umsb.fd 2013/01/14 v3.01 AMS symbols B ) [1 -{/var/lib/texmf/fonts/map/pdftex/updmap/pdftex.map}] [2] [3] (./document.toc [4 -]) +{/var/lib/texmf/fonts/map/pdftex/updmap/pdftex.map} <./images/reading_logo.png> +] [2] [3] +(./document.toc [4]) \tf@toc=\write6 \openout6 = `document.toc'. - [5] [6] -[7] [8] [9] [10] [11] [12] [13] [14] [15] [16] + [5] [6] [7] [8] [9] +LaTeX Font Info: Try loading font information for OMS+cmr on input line 199. 
+ + +(/usr/share/texlive/texmf-dist/tex/latex/base/omscmr.fd +File: omscmr.fd 2014/09/29 v2.5h Standard LaTeX font definitions +) +LaTeX Font Info: Font shape `OMS/cmr/m/n' in size <10> not available +(Font) Font shape `OMS/cmsy/m/n' tried instead on input line 199. + [10] [11] +Missing character: There is no â in font cmr10! +Missing character: There is no € in font cmr10! +Missing character: There is no ™ in font cmr10! + +File: images/perceptron.png Graphic file (type png) + +Package pdftex.def Info: images/perceptron.png used on input line 237. +(pdftex.def) Requested size: 284.52713pt x 170.72142pt. + [12] + +File: images/rnn_ffn.png Graphic file (type png) + +Package pdftex.def Info: images/rnn_ffn.png used on input line 257. +(pdftex.def) Requested size: 426.80307pt x 170.72112pt. + +[13 <./images/perceptron.png>] + +File: images/lstm.png Graphic file (type png) + +Package pdftex.def Info: images/lstm.png used on input line 275. +(pdftex.def) Requested size: 284.52357pt x 227.62pt. + [14 <./images/rnn_ffn.png>] [15 <./images/lstm.png>] [16] [17] [18] [19] [20] +[21] +Overfull \hbox (1.46323pt too wide) in paragraph at lines 367--367 +\OT1/cmr/m/n/10 [On-line]. Avail-able: []$\OT1/cmtt/m/n/10 https : / / www . co +deproject . com / Articles / 1201444 / Stock -[] Predictions -[] + [] + + +Overfull \hbox (22.26572pt too wide) in paragraph at lines 367--367 +\OT1/cmr/m/n/10 able: []$\OT1/cmtt/m/n/10 https : / / towardsdatascience . com +/ recurrent -[] neural -[] networks -[] and -[] lstm -[] 4b601dd822a5$[]\OT1/cm +r/m/n/10 . + [] + +[22] [23] pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf used on input line 215. +Package pdftex.def Info: PID.pdf used on input line 372. (pdftex.def) Requested size: 597.551pt x 845.07512pt. 
@@ -977,7 +1027,7 @@ pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf used on input line 215. +Package pdftex.def Info: PID.pdf used on input line 372. (pdftex.def) Requested size: 597.551pt x 845.07512pt. @@ -987,247 +1037,250 @@ rsion <1.7>, but at most version <1.5> allowed pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page1 used on input line 215. +Package pdftex.def Info: PID.pdf , page1 used on input line 372. (pdftex.def) Requested size: 597.551pt x 845.07512pt. File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page1 used on input line 215. +Package pdftex.def Info: PID.pdf , page1 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. -[17] +[24] File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page1 used on input line 215. +Package pdftex.def Info: PID.pdf , page1 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page1 used on input line 215. +Package pdftex.def Info: PID.pdf , page1 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page1 used on input line 215. +Package pdftex.def Info: PID.pdf , page1 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. - [18 <./PID.pdf>] + [25 <./PID.pdf>] pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page2 used on input line 215. 
+Package pdftex.def Info: PID.pdf , page2 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page2 used on input line 215. +Package pdftex.def Info: PID.pdf , page2 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) -Package pdftex.def Info: PID.pdf , page2 used on input line 215. +Package pdftex.def Info: PID.pdf , page2 used on input line 372. (pdftex.def) Requested size: 562.1644pt x 795.0303pt. -[19 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page3 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page3 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page3 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -[20 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page4 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page4 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page4 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. 
-[21 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page5 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page5 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page5 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -[22 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page6 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page6 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page6 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -[23 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page7 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page7 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page7 used on input line 215. -(pdftex.def) Requested size: 562.1644pt x 795.0303pt. 
-[24 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page8 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page8 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page8 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. -[25 <./PID.pdf>] - -pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve -rsion <1.7>, but at most version <1.5> allowed - -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page9 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page9 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. -File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page9 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. [26 <./PID.pdf>] pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page10 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page3 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page10 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page3 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. 
File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page10 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page3 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. [27 <./PID.pdf>] pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page11 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page4 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page11 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page4 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page11 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page4 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. [28 <./PID.pdf>] pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve rsion <1.7>, but at most version <1.5> allowed - + File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page12 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page5 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page12 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page5 used on input line 372. 
+(pdftex.def) Requested size: 562.1644pt x 795.0303pt. File: PID.pdf Graphic file (type pdf) - -Package pdftex.def Info: PID.pdf , page12 used on input line 215. -(pdftex.def) Requested size: 795.0303pt x 562.1644pt. + +Package pdftex.def Info: PID.pdf , page5 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. [29 <./PID.pdf>] -Package atveryend Info: Empty hook `BeforeClearDocument' on input line 219. - [30] -Package atveryend Info: Empty hook `AfterLastShipout' on input line 219. + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page6 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page6 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page6 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. +[30 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page7 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page7 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page7 used on input line 372. +(pdftex.def) Requested size: 562.1644pt x 795.0303pt. 
+[31 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page8 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page8 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page8 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +[32 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page9 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page9 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page9 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +[33 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page10 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page10 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page10 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. 
+[34 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page11 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page11 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page11 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +[35 <./PID.pdf>] + +pdfTeX warning: /usr/bin/pdflatex (file ./PID.pdf): PDF inclusion: found PDF ve +rsion <1.7>, but at most version <1.5> allowed + +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page12 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page12 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +File: PID.pdf Graphic file (type pdf) + +Package pdftex.def Info: PID.pdf , page12 used on input line 372. +(pdftex.def) Requested size: 795.0303pt x 562.1644pt. +[36 <./PID.pdf>] +Package atveryend Info: Empty hook `BeforeClearDocument' on input line 376. + [37] +Package atveryend Info: Empty hook `AfterLastShipout' on input line 376. (./document.aux) -Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 219. -Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 219. +Package atveryend Info: Executing hook `AtVeryEndDocument' on input line 376. +Package atveryend Info: Executing hook `AtEndAfterFileList' on input line 376. Package rerunfilecheck Info: File `document.out' has not changed. -(rerunfilecheck) Checksum: A5AFC2730D2DEC485257C5D5E44439CB;5994. +(rerunfilecheck) Checksum: 30CCF589C480B87162EBF723A1651341;8362. 
Package logreq Info: Writing requests to 'document.run.xml'. \openout1 = `document.run.xml'. ) Here is how much of TeX's memory you used: - 22372 strings out of 494586 - 354283 string characters out of 6177802 - 847622 words of memory out of 5000000 - 25613 multiletter control sequences out of 15000+600000 + 22430 strings out of 492982 + 354743 string characters out of 6134895 + 844942 words of memory out of 5000000 + 25551 multiletter control sequences out of 15000+600000 12017 words of font info for 47 fonts, out of 8000000 for 9000 - 14 hyphenation exceptions out of 8191 - 45i,18n,67p,2008b,1215s stack positions out of 5000i,500n,10000p,200000b,80000s + 1141 hyphenation exceptions out of 8191 + 45i,18n,67p,2008b,1300s stack positions out of 5000i,500n,10000p,200000b,80000s -Output written on document.pdf (30 pages, 327776 bytes). +e/texlive/texmf-dist/fonts/type1/public/amsfonts/cm/cmmi10.pfb> +Output written on document.pdf (37 pages, 522816 bytes). PDF statistics: - 475 PDF objects out of 1000 (max. 8388607) - 411 compressed objects within 5 object streams - 84 named destinations out of 1000 (max. 500000) - 412 words of extra memory for PDF output out of 10000 (max. 10000000) + 689 PDF objects out of 1000 (max. 8388607) + 605 compressed objects within 7 object streams + 121 named destinations out of 1000 (max. 500000) + 528 words of extra memory for PDF output out of 10000 (max. 
10000000) diff --git a/document.out b/document.out index bca230b..e87d778 100644 --- a/document.out +++ b/document.out @@ -8,35 +8,47 @@ \BOOKMARK [2][-]{section*.9}{\376\377\000P\000r\000o\000j\000e\000c\000t\000\040\000C\000o\000n\000s\000t\000r\000a\000i\000n\000t\000s}{section*.6}% 8 \BOOKMARK [1][-]{section*.10}{\376\377\000L\000i\000t\000e\000r\000a\000t\000u\000r\000e\000\040\000R\000e\000v\000i\000e\000w}{}% 9 \BOOKMARK [2][-]{section*.11}{\376\377\000E\000x\000i\000s\000t\000i\000n\000g\000\040\000T\000o\000o\000l\000s}{section*.10}% 10 -\BOOKMARK [2][-]{section*.12}{\376\377\000R\000e\000l\000a\000t\000e\000d\000\040\000W\000o\000r\000k}{section*.10}% 11 -\BOOKMARK [2][-]{section*.13}{\376\377\000T\000w\000e\000e\000t\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section*.10}% 12 -\BOOKMARK [2][-]{section*.14}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section*.10}% 13 -\BOOKMARK [3][-]{section*.15}{\376\377\000A\000l\000g\000o\000r\000i\000t\000h\000m\000s}{section*.14}% 14 -\BOOKMARK [3][-]{section*.16}{\376\377\000T\000e\000c\000h\000n\000i\000q\000u\000e\000s}{section*.14}% 15 -\BOOKMARK [2][-]{section*.17}{\376\377\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k\000s}{section*.10}% 16 -\BOOKMARK [3][-]{section*.18}{\376\377\000T\000y\000p\000e\000s}{section*.17}% 17 -\BOOKMARK [3][-]{section*.19}{\376\377\000L\000S\000T\000M\000s}{section*.17}% 18 -\BOOKMARK [2][-]{section*.20}{\376\377\000M\000a\000c\000h\000i\000n\000e\000\040\000L\000e\000a\000r\000n\000i\000n\000g}{section*.10}% 19 -\BOOKMARK [3][-]{section*.21}{\376\377\000L\000o\000g\000i\000s\000t\000i\000c\000a\000l\000\040\000R\000e\000g\000r\000e\000s\000s\000i\000o\000n}{section*.20}% 20 -\BOOKMARK [1][-]{section*.22}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000A\000p\000p\000r\000o\000a\000c\000h}{}% 21 -\BOOKMARK 
[2][-]{section*.23}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000S\000u\000m\000m\000a\000r\000y}{section*.22}% 22 -\BOOKMARK [2][-]{section*.24}{\376\377\000D\000a\000t\000a\000\040\000f\000l\000o\000w\000\040\000O\000v\000e\000r\000v\000i\000e\000w}{section*.22}% 23 -\BOOKMARK [2][-]{section*.25}{\376\377\000P\000a\000c\000k\000a\000g\000e\000s\000,\000\040\000T\000o\000o\000l\000s\000\040\000a\000n\000d\000\040\000T\000e\000c\000h\000n\000i\000q\000u\000e\000s}{section*.22}% 24 -\BOOKMARK [1][-]{section*.26}{\376\377\000S\000y\000s\000t\000e\000m\000\040\000D\000e\000s\000i\000g\000n\000\040\000a\000n\000d\000\040\000I\000m\000p\000l\000e\000m\000e\000n\000t\000a\000t\000i\000o\000n}{}% 25 -\BOOKMARK [2][-]{section*.27}{\376\377\000D\000a\000t\000a\000\040\000c\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section*.26}% 26 -\BOOKMARK [2][-]{section*.28}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section*.26}% 27 -\BOOKMARK [3][-]{section*.29}{\376\377\000P\000r\000e\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section*.28}% 28 -\BOOKMARK [3][-]{section*.33}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section*.28}% 29 -\BOOKMARK [2][-]{section*.39}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section*.26}% 30 -\BOOKMARK [3][-]{section*.40}{\376\377\000V\000A\000D\000E\000R}{section*.39}% 31 -\BOOKMARK [1][-]{section*.41}{\376\377\000T\000e\000s\000t\000i\000n\000g\000:\000\040\000V\000e\000r\000i\000f\000i\000c\000a\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 32 -\BOOKMARK [1][-]{section*.42}{\376\377\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n\000:\000\040\000C\000o\000n\000t\000r\000i\000b\000u\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 33 -\BOOKMARK 
[2][-]{section*.43}{\376\377\000L\000i\000m\000i\000t\000a\000t\000i\000o\000n\000s}{section*.42}% 34 -\BOOKMARK [1][-]{section*.44}{\376\377\000S\000o\000c\000i\000a\000l\000,\000\040\000L\000e\000g\000a\000l\000\040\000a\000n\000d\000\040\000E\000t\000h\000i\000c\000a\000l\000\040\000I\000s\000s\000u\000e\000s}{}% 35 -\BOOKMARK [1][-]{section*.45}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n\000\040\000a\000n\000d\000\040\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{}% 36 -\BOOKMARK [2][-]{section*.46}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n}{section*.45}% 37 -\BOOKMARK [2][-]{section*.47}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{section*.45}% 38 -\BOOKMARK [1][-]{section*.48}{\376\377\000R\000e\000f\000e\000r\000e\000n\000c\000e\000s}{}% 39 -\BOOKMARK [1][-]{section*.50}{\376\377\000A\000p\000p\000e\000n\000d\000i\000c\000e\000s}{}% 40 -\BOOKMARK [2][-]{section*.51}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000A\000\040\000-\000\040\000P\000r\000o\000j\000e\000c\000t\000\040\000I\000n\000i\000t\000i\000a\000t\000i\000o\000n\000\040\000D\000o\000c\000u\000m\000e\000n\000t}{section*.50}% 41 -\BOOKMARK [2][-]{section*.52}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000B\000\040\000-\000\040\000L\000o\000g\000\040\000b\000o\000o\000k}{section*.50}% 42 +\BOOKMARK [2][-]{section*.12}{\376\377\000R\000e\000l\000a\000t\000e\000d\000\040\000r\000e\000s\000e\000a\000r\000c\000h}{section*.10}% 11 +\BOOKMARK [2][-]{section*.13}{\376\377\000D\000a\000t\000a\000\040\000C\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section*.10}% 12 +\BOOKMARK [3][-]{section*.14}{\376\377\000T\000w\000i\000t\000t\000e\000r\000\040\000a\000n\000d\000\040\000T\000w\000i\000t\000t\000e\000r\000\040\000A\000P\000I}{section*.13}% 13 +\BOOKMARK 
[3][-]{section*.15}{\376\377\000T\000w\000e\000e\000p\000y\000\040\000P\000y\000t\000h\000o\000n\000\040\000P\000a\000c\000k\000a\000g\000e}{section*.13}% 14 +\BOOKMARK [2][-]{section*.16}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section*.10}% 15 +\BOOKMARK [3][-]{section*.17}{\376\377\000N\000a\000t\000u\000r\000a\000l\000\040\000L\000a\000n\000g\000u\000a\000g\000e\000\040\000P\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section*.16}% 16 +\BOOKMARK [3][-]{section*.18}{\376\377\000V\000a\000l\000e\000n\000c\000e\000\040\000A\000w\000a\000r\000e\000\040\000D\000i\000c\000t\000i\000o\000n\000a\000r\000y\000\040\000a\000n\000d\000\040\000s\000E\000n\000t\000i\000m\000e\000n\000t\000\040\000R\000e\000a\000s\000o\000n\000i\000n\000g}{section*.16}% 17 +\BOOKMARK [2][-]{section*.19}{\376\377\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k\000s}{section*.10}% 18 +\BOOKMARK [3][-]{section*.20}{\376\377\000R\000e\000c\000u\000r\000r\000e\000n\000t\000\040\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k\000\040\000\050\000R\000N\000N\000\051}{section*.19}% 19 +\BOOKMARK [3][-]{section*.21}{\376\377\000L\000o\000n\000g\000-\000S\000h\000o\000r\000t\000\040\000T\000e\000r\000m\000\040\000M\000e\000m\000o\000r\000y\000\040\000\050\000L\000S\000T\000M\000\051}{section*.19}% 20 +\BOOKMARK [3][-]{section*.22}{\376\377\000K\000e\000r\000a\000\040\000a\000n\000d\000\040\000T\000e\000n\000s\000o\000r\000F\000l\000o\000w}{section*.19}% 21 +\BOOKMARK [3][-]{section*.23}{\376\377\000O\000p\000t\000i\000m\000i\000s\000e\000r\000s}{section*.19}% 22 +\BOOKMARK [3][-]{section*.24}{\376\377\000L\000o\000s\000s}{section*.19}% 23 +\BOOKMARK [3][-]{section*.25}{\376\377\000R\000e\000g\000u\000l\000a\000r\000i\000s\000a\000t\000i\000o\000n\000\040\000-\000\040\000D\000r\000o\000p\000o\000u\000t\000s}{section*.19}% 24 +\BOOKMARK 
[2][-]{section*.26}{\376\377\000M\000a\000c\000h\000i\000n\000e\000\040\000L\000e\000a\000r\000n\000i\000n\000g}{section*.10}% 25 +\BOOKMARK [3][-]{section*.27}{\376\377\000N\000a\000i\000v\000e\000\040\000B\000a\000y\000e\000s}{section*.26}% 26 +\BOOKMARK [2][-]{section*.28}{\376\377\000O\000t\000h\000e\000r\000\040\000T\000e\000c\000h\000n\000i\000q\000u\000e\000s}{section*.10}% 27 +\BOOKMARK [3][-]{section*.29}{\376\377\000N\000g\000r\000a\000m\000s}{section*.28}% 28 +\BOOKMARK [3][-]{section*.30}{\376\377\000S\000c\000o\000r\000i\000n\000g\000\040\000a\000n\000d\000\040\000V\000a\000l\000i\000d\000a\000t\000i\000o\000n}{section*.28}% 29 +\BOOKMARK [1][-]{section*.31}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000A\000p\000p\000r\000o\000a\000c\000h}{}% 30 +\BOOKMARK [2][-]{section*.32}{\376\377\000S\000o\000l\000u\000t\000i\000o\000n\000\040\000S\000u\000m\000m\000a\000r\000y}{section*.31}% 31 +\BOOKMARK [2][-]{section*.33}{\376\377\000D\000a\000t\000a\000\040\000f\000l\000o\000w\000\040\000O\000v\000e\000r\000v\000i\000e\000w}{section*.31}% 32 +\BOOKMARK [2][-]{section*.34}{\376\377\000P\000a\000c\000k\000a\000g\000e\000s\000,\000\040\000T\000o\000o\000l\000s\000\040\000a\000n\000d\000\040\000T\000e\000c\000h\000n\000i\000q\000u\000e\000s}{section*.31}% 33 +\BOOKMARK [1][-]{section*.35}{\376\377\000S\000y\000s\000t\000e\000m\000\040\000D\000e\000s\000i\000g\000n\000\040\000a\000n\000d\000\040\000I\000m\000p\000l\000e\000m\000e\000n\000t\000a\000t\000i\000o\000n}{}% 34 +\BOOKMARK [2][-]{section*.36}{\376\377\000D\000a\000t\000a\000\040\000c\000o\000l\000l\000e\000c\000t\000i\000o\000n}{section*.35}% 35 +\BOOKMARK [3][-]{section*.37}{\376\377\000P\000r\000i\000c\000e\000\040\000T\000i\000m\000e\000-\000s\000e\000r\000i\000e\000s\000\040\000D\000a\000t\000a}{section*.36}% 36 +\BOOKMARK [2][-]{section*.38}{\376\377\000D\000a\000t\000a\000\040\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section*.35}% 37 +\BOOKMARK 
[3][-]{section*.39}{\376\377\000P\000r\000e\000p\000r\000o\000c\000e\000s\000s\000i\000n\000g}{section*.38}% 38 +\BOOKMARK [3][-]{section*.43}{\376\377\000S\000p\000a\000m\000\040\000F\000i\000l\000t\000e\000r\000i\000n\000g}{section*.38}% 39 +\BOOKMARK [2][-]{section*.46}{\376\377\000S\000e\000n\000t\000i\000m\000e\000n\000t\000\040\000A\000n\000a\000l\000y\000s\000i\000s}{section*.35}% 40 +\BOOKMARK [3][-]{section*.47}{\376\377\000V\000A\000D\000E\000R}{section*.46}% 41 +\BOOKMARK [2][-]{section*.48}{\376\377\000R\000e\000c\000u\000r\000r\000e\000n\000t\000\040\000N\000e\000u\000r\000a\000l\000\040\000N\000e\000t\000w\000o\000r\000k}{section*.35}% 42 +\BOOKMARK [3][-]{section*.49}{\376\377\000T\000r\000a\000i\000n\000i\000n\000g\000\040\000a\000n\000d\000\040\000T\000e\000s\000t\000i\000n\000g\000\040\000M\000o\000d\000e\000l}{section*.48}% 43 +\BOOKMARK [3][-]{section*.50}{\376\377\000V\000a\000l\000i\000d\000a\000t\000i\000o\000n}{section*.48}% 44 +\BOOKMARK [3][-]{section*.51}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000P\000r\000e\000d\000i\000c\000t\000i\000o\000n\000\040\000F\000o\000r\000e\000c\000a\000s\000t\000i\000n\000g}{section*.48}% 45 +\BOOKMARK [1][-]{section*.52}{\376\377\000T\000e\000s\000t\000i\000n\000g\000:\000\040\000V\000e\000r\000i\000f\000i\000c\000a\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 46 +\BOOKMARK [1][-]{section*.53}{\376\377\000D\000i\000s\000c\000u\000s\000s\000i\000o\000n\000:\000\040\000C\000o\000n\000t\000r\000i\000b\000u\000t\000i\000o\000n\000\040\000a\000n\000d\000\040\000R\000e\000f\000l\000e\000c\000t\000i\000o\000n}{}% 47 +\BOOKMARK [2][-]{section*.54}{\376\377\000L\000i\000m\000i\000t\000a\000t\000i\000o\000n\000s}{section*.53}% 48 +\BOOKMARK [1][-]{section*.55}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n\000\040\000a\000n\000d\000\040\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{}% 
49 +\BOOKMARK [2][-]{section*.56}{\376\377\000C\000o\000n\000c\000l\000u\000s\000i\000o\000n}{section*.55}% 50 +\BOOKMARK [2][-]{section*.57}{\376\377\000F\000u\000t\000u\000r\000e\000\040\000I\000m\000p\000r\000o\000v\000e\000m\000e\000n\000t\000s}{section*.55}% 51 +\BOOKMARK [1][-]{section*.59}{\376\377\000A\000p\000p\000e\000n\000d\000i\000c\000e\000s}{}% 52 +\BOOKMARK [2][-]{section*.60}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000A\000\040\000-\000\040\000P\000r\000o\000j\000e\000c\000t\000\040\000I\000n\000i\000t\000i\000a\000t\000i\000o\000n\000\040\000D\000o\000c\000u\000m\000e\000n\000t}{section*.59}% 53 +\BOOKMARK [2][-]{section*.61}{\376\377\000A\000p\000p\000e\000n\000d\000i\000x\000\040\000B\000\040\000-\000\040\000L\000o\000g\000\040\000b\000o\000o\000k}{section*.59}% 54 diff --git a/document.pdf b/document.pdf index b5971f8..f0e7cbf 100644 Binary files a/document.pdf and b/document.pdf differ diff --git a/document.run.xml b/document.run.xml index 082fc57..3941213 100644 --- a/document.run.xml +++ b/document.run.xml @@ -53,12 +53,12 @@ blx-dm.def blx-compat.def biblatex.def - standard.bbx - numeric.bbx - numeric-comp.bbx ieee.bbx - numeric-comp.cbx + numeric-comp.bbx + numeric.bbx + standard.bbx ieee.cbx + numeric-comp.cbx biblatex.cfg english.lbx diff --git a/document.synctex.gz b/document.synctex.gz index b36bad8..6069bf1 100644 Binary files a/document.synctex.gz and b/document.synctex.gz differ diff --git a/document.tex b/document.tex index 6f02585..5f4e846 100644 --- a/document.tex +++ b/document.tex @@ -21,6 +21,7 @@ \usepackage{pdflscape} \usepackage[acronym,toc]{glossaries} \usepackage[margin=1.2in]{geometry} +\usepackage{titling} \usepackage[style=ieee,backend=biber]{biblatex} \addbibresource{report.bib} @@ -37,6 +38,21 @@ \setlength{\parskip}{6pt plus 2pt minus 1pt} +\pretitle{% + \begin{center} + \huge + \includegraphics[width=6cm,height=2cm]{images/reading_logo.png}\\[\bigskipamount] + + } +\posttitle{ + + \large + School of 
Mathematical, Physical and Computational Sciences + + Individual Project - CS3IP16 +\end{center}} + + \title{Opinion Mining and Social Media Sentiment Analysis in the Prediction of Cryptocurrency Prices} \date{Submission date: Place Holder} \author{Student: Andrew Sotheran @@ -52,12 +68,20 @@ \begin{center} \section{Abstract}\label{abstract} \end{center} - The volatility of the stock markets is an aspect that is both hard to predict and to mitigate especially when relating to the cryptocurrency market. Cryptocurrency is highly volatile and which has attracted investors to attempt to make quick profits on the market. + The volatility of the stock markets is an aspect that is both hard to predict and to mitigate particularly when relating to the cryptocurrency market. Commodities such as cryptocurrencies are profoundly volatile and have attracted investors in an attempt to make quick profits on the market. These financial commodities are subject to the whim of public confidence and platforms such as Twitter and Facebook are most notably utilised to express opinions. Extrapolating sentiment from such platforms has been used to gain insight into topics across industries, thus applying it to crypto-market analysis could serve to show a relationship between public opinion and market change. + + This project looks into public perception of the cryptomarket, by analysing Bitcoin-related tweets per hour for sentiment changes that could indicate a correlation to market fluctuations in the near future. This is achieved by training a recurrent neural network on the severity changes of historical sentiment and price over the past year every hour. The predictions are then shifted forward in time by 1 hour to indicate the corresponding Bitcoin price interval. \newpage \begin{center} \section{Acknowledgements}\label{acknowledgements} + I would like to express my gratitude to Dr. Kenneth Boness for his continued support and guidance throughout this project. 
+ + Secondly, I want to express gratitude to PhD. Jason Brownlee, of \href{machinelearningmastery.com}{Machine Learning Mastery} for having clear and thorough explanations of machine learning concepts and metrics. + + I would also like to thank my family for their support during the development of this project. + \end{center} \newpage @@ -76,6 +100,18 @@ Tweets - The name given for messages posted on the Twitter platform, which are restricted to 280 characters. + Hashtag - Is a keyword or phrase used to describe a topic and allows the tweets to be categorised. + + Fomo (Fear of Missing Out) - Is used to describe buying behaviour when stocks are moving suddenly and more buyers appear to enter all of a sudden. + + Shorting - Or short sale, is the sale of an asset that the investor buys shares and immediately sells them, hoping to make a profit from buying later at a lower price. + + Doubling Down - Is to take further risk on a stock by doubling effort/investment in a hope and attempt to raise the price + + RNN - Recurrent Neural Network + + LSTM - Long-Short Term Memory Neural Network + \newpage \begin{center} @@ -111,42 +147,160 @@ \subsection{Problem Statement}\label{statement} - The key problems this project will attempt to address are that of a public open-source system that aids in the analysis and prediction of BTC, the accuracy of open-source tools and technology when applied to trading market scene and to identify whether there is a correlation between Twitter sentiment and BTC price fluctuation. While there are tools out there only a few are available to the public and only provide basic functionality such as only sentiment analysis, while others are kept in-house of major corporations whom invest into this problem domain. + The key problems this project attempts to address are that of, an open-source system available to the public that aids in the analysis and prediction of BTC. 
The accuracy of open-source tools and technology when applied to the trading market scene and to identify whether there is a correlation between Twitter sentiment and BTC price fluctuation. While there are existing tools, only a few are available to the public and only provide basic functionality, while others are kept in-house of major corporations who invest into this problem domain. - The other issue presented here is that assuming perfect accuracy can be achieved is naive. As this project will only be using existing tools and technologies thus, there are limitations to accuracy that can be obtained. One of that being the suitability of the tools, there are no open-source sentiment analysers for stock market prediction thus finding a specifically trained analyser for the chosen domain in highly unlikely. In relation, finding the most suitable machine learning or neural network is equally important as this will determine the accuracy of the predictions. + The other issue presented here is that assuming perfect accuracy can be achieved is naive. As this project will only be using existing tools and technologies, there are limitations to accuracy that can be obtained. One of those being the suitability of the tools, there are no open-source sentiment analysers for stock market prediction, thus finding a specifically trained analyser for the chosen domain is highly unlikely. In relation, finding the most suitable machine learning or neural network is equally important as this will determine the accuracy of the predictions and should be discussed in the literature review. - The accuracy and suitability of various machine learning methods and neural networks are a known issue in their respective domains, this investigation should be carried out to determine their suitability for their needed use in this project. 
+ The accuracy and suitability of various machine learning methods and neural networks are a known issue in their respective domains, this investigation should be carried out to determine their suitability for their needed use in this project and should be detailed in the literature review. - This project will focus on the investigation of these technologies and whether it is feasible to predict the price of BTC based on historical price and the sentiment gathered from Twitter. The accuracy of the system will be compared to other technologies to identify limitations in the proposed solution and to determine the for other technologies if this is the case. - - A system will be created that will utilise + This project will focus on the investigation of these technologies and tools to justify whether it is feasible to predict the price of BTC based on historical price and the sentiment gathered from Twitter. Limitations of the system and its accuracy in predictions should be investigated and discussed to determine whether the implemented solution is more suitable compared to other methods. \subsection{Stakeholders}\label{stakeholders} + The stakeholders of this system would be those looking to invest in the cryptocurrency markets, in this project's regard, specifically into Bitcoin. Private, Business and public investors would benefit from an open-source system such as this as it will both provide a basis for decisions on whether to invest or not. + Public investors, general people looking to invest in the cryptocurrency market, will be those stakeholders that will benefit the most from such a system. Due to the lack of any open-source tools available, these stakeholders could be seen as being left in the dark when it comes to predicting the direction of Bitcoin where Businesses and Enterprises will have a one up, due to having an internal system for predictions. 
\subsection{Project Constraints}\label{constraints} + This project will not attempt to justify the accuracy of the chosen algorithm or tools over other algorithms. It will be discussed in the literature review the justifications made on why the chosen algorithm and tools have been used for this project over the others, but accuracy will not be directly compared. + + This project will only be coded to predict an hour ahead as the model will be trained on an hourly basis as the data is gathered per hour. Predictions for further in the future can be modelled but will be seen as a future improvement to the system. + + The detail of an interface may be subject to change through this project due to time constraints and the focus being the investigation of the impact social media has on market predictions. \newpage \begin{center} \section{Literature Review}\label{literature} \end{center} - \subsection{Existing Tools} - - \subsection{Related Work} - - \subsection{Tweet Collection}\label{tweet_collection} + \subsection{Existing Tools} + An aspect that this project will be attempting to address is that, at the time of writing, there are a limited number of systems available to the public that either provide sentiment analysis or predictions of the crypto-market. Additionally, none are known that combine both sentiment and price analysis to make said predictions on the direction of the market. 
A well-known cryptocurrency tracking site,\href{https://www.coingecko.com}{Coingecko} provides a basic sentiment analysis tool for their top 30 ranking cryptocurrencies tracked on the site. This tool shows the sentiment analysis of tweets from Twitter every hour for a given cryptocurrency. This is displayed as a simple pill on the page showing the ratios of positive, neutral and negative tweets. \textit{See Appendix C for visual representation} + + \subsection{Related research} + + There has been a plentiful amount of research conducted in this problem domain. Numerous theses globally have been published in recent years on the topic of cryptocurrency market predictions and analysis, and even more, research conducted on general stock markets further back. + + The thesis written by \textit{Evita Stenqvist and Jacob Lonno} of the \textit{KTH Royal Institute of Technology} \cite{2} investigates the use of sentiment expressed through micro-blogging such as Twitter can have on the price fluctuations of Bitcoin. Its primary focus was creating an analyser for the sentiment of tweets more accurately \textit{"by not only taking into account negation, but also valence, common slang and smileys"}, than that of former researchers that \textit{"mused that accounting for negations in text may be a step in the direction of more accurate predictions."}. This would be built upon the lexicon-based sentiment analyser VADER to ascertain the overall sentiment scores were grouped into time-series for each interval from 5 minutes to 4 hours, along with the interval prices for Bitcoin. The model chosen was a naive binary classified vectors of predictions for a certain threshold to \textit{"ultimately compare the predictions to actual historical price data"}. The results of this research suggest that a binary classification model of varying threshold over time-shifts in time-series data was "lackluster", seeing the number of predictions decreasing rapidly as the threshold changed. 
This research is a good basis of starting research upon, as it suggests tools such as VADER for sentiment analysis and that the use of a machine learning algorithm would be a next step in the project that would yield better more accurate results. + + Another thesis written by \textit{Pagolu, Venkata Sasank and Reddy Kamal Nayan, Panda Ganapati and Majhi, Babita} \cite{1} on "Sentiment Analysis of Twitter Data for Predicting Stock Market Movements" 2.5 million tweets on Microsoft were extracted from Twitter, sentiment analysis and logistical regression performed on the data yielded 69.01\% accuracy for a 3-day period on the increase/decrease in stock price. These results showed a "\textit{good correlation between stock market movements and the sentiments of the public expressed in Twitter}". Using various natural language pre-processing tweets for feature extraction such as N-gram representation the sentiment from tweets were extrapolated. Both Word2vec and a random forest classifier were compared for accuracy, Word2vec being chosen over the machine learning model. Word2vec, being a group of related shallow two-layer neural network models to produce word embeddings. + + A topic that reoccurs in various papers and theses is that of the use and focus of regression techniques and machine learning methods. Few implement a fully fledged neural network, the above paper attempts to use a simple network to achieve predictions of classification of sentiment for stock market movement then correlated this with historical data of prices. An article posted on "Code Project" by Intel Corporation \cite{3} compares the accuracy of three machine learning algorithms; Random Forest, Logistic Regression and Multi-Layer Perceptron (MLP) classifiers on predicting the price fluctuations of Bitcoin with embedded price indices. Results showing \textit{"that using the MLP classifier (a.k.a. neural networks) showed better results than logistic regression and random forest trained models"}. 
This assumption can be backed up by the results from a thesis posted on IEEE \cite{4} which compares a Bayesian optimised recurrent neural network and a Long Short Term Memory (LSTM) network, showing the LSTM model achieving \textit{"the highest classification accuracy of 52\% and a RMSE of 8\%"}. With an interest in neural networks personally and with few papers utilising them for this purpose a neural network will thus be implemented, and the accuracy of its predictions with use of sentiment analysis data analysed and discussed. + + \subsection{Data Collection}\label{tweet_collection} + + \subsubsection{Twitter and Twitter API} + Twitter is a micro-blogging platform that was launched in 2006 and provides its users the ability to publish short messages of 140 characters. The messages published could be of any form, from news snippets, advertisement, or the prevalent publication of opinions which allowed a platform of extensive diversity and knowledge wealth. As of the time of writing, the message character limit was increased to 280 characters, the platform has over 300 million monthly active users and around 1 million tweets are published per day. Due to the length restriction and the primary use of the platform to express opinions Twitter is seen as a gold mine for opinion mining. + + The Twitter API has an extensive range of endpoints that provide access from streaming tweets for a given hashtag, obtaining historical tweets for a given time-period and hashtag, posting tweets on a user's account and to change settings on a user account with authentication. The exhaustive range of features provided by these endpoints makes data collection from Twitter straightforward as one can target a specific endpoint for the required data. Due to Twitter being the target for opinion mining within this project the Twitter API will ultimately need to be utilised. 
This can either be used for the gathering of historical tweets or streaming current tweets for the \#Bitcoin hashtag. + + There are, however, limitations and rate limits imposed on users of the API. Twitter employs a tiering system for the API - Standard, Premium and Enterprise tiers, each of which provides different amounts of access for data collection. If the API were used to capture historical data for a span of 3 months, each tier is allowed to obtain varying amounts of data for different durations; \cite{5} + + \begin{itemize} + \item A Standard user would be able to capture 100 recent tweets for the past 7 days + \item A Premium user would be allowed to capture up to 500 tweets per request for a 30-day span and will have access to a full-archive search to query up to 100 tweets per request for a given time period, with a 50 request limit per month + \item An Enterprise user would be able to capture up to 500 tweets per unlimited requests for a 30-day span and will be able to query the full-archive of tweets for a given hashtag up to 2000 tweets per unlimited amount of requests for a given time period + \end{itemize} + + Each tier has individual costs, with the standard tier negating this as a basic tier. Due to only being eligible for the Premium tier for educational purposes, historical data gathering will be limited to 100 tweets per request with a limitation of 50 requests per month. Furthermore, streaming tweets is an Enterprise feature which rules out the Twitter API for use of streaming current real-time data \cite{6}. + + \subsubsection{Tweepy Python Package} + Tweepy is a python package for accessing the Twitter API. It fundamentally accomplishes the same means as if one were to conduct a GET request to the Twitter API, except it simplifies this into a simple to use API that is easier to implement and automate in python \cite{7}. 
Consequently, it builds upon the existing Twitter API to provide features such as automated streaming of provided hashtags to the API. It realises this by initialising a listener instance for a provided set of API credentials, handling authentication, connections, creating and destroying sessions. Due to Twitter's streaming API being only available to Enterprise users \cite{6}, using Tweepy to stream data for a given hashtag will provide the real-time data needed. + + + \subsection{Sentiment Analysis}\label{sentiment} + In short, sentiment analysis is the process and discovery of computationally identifying and categorising the underlying opinions and subjectivity expressed in written language. This process determines the writer's attitude towards a particular topic as either being positive, neutral or negative in terms of opinion, known as polarity classification. + + \subsubsection{Natural Language Processing}\label{algorithms} + Polarity classification is the focus of sentiment analysis and is a well-known problem in natural language processing that has had significant attention by researchers in recent years \cite{1}\cite{2}\cite{4}\cite{8}. Traditional approaches to this have usually been classified into dictionary-based approaches that use pre-constructed sentiment lexicons such as VADER or usually confined to machine learning approaches. The latter requires an extensive amount of natural language pre-processing to extrapolate vectors and features from given text; this is then fed into a machine learning classifier which attempts to categorise words to a level of sentiment polarity. Natural language pre-processing techniques that would be required for this approach would consist of; + + \begin{itemize} + \item Tokenisation: The act of splitting a stream of text into smaller units of typographical tokens which isolate unneeded punctuation. 
+ \item Removal of domain specific expressions that are not part of general purpose English tokenisers - a particular problem with the nature of the language used in tweets, with @-mentions and \#-hashtags + \item Stopword removal: The removal of commonly used words (such as "the","in","a") that provide no meaning to the sentiment of a given text + \item Stemming: Used to replace words with common suffixes and prefixes, as "go" and "goes" fundamentally convey the same meaning. A stemmer will replace such words with their reduced counterparts + \item Term Probability Identification and Feature Extraction: This is a process that involves identifying the most frequently used words in a given text; by using a probability-based approach on a pre-defined dataset which classifies a range of texts as overall negative or positive, a machine learning algorithm is trained to classify these accordingly. + \end{itemize} + + The former has been proven to provide higher accuracy than traditional machine learning approaches \cite{9}, and needs little pre-processing conducted on the data as words have a pre-defined sentiment classification in a provided lexicon. Although these lexicons can be complex to create, they generally require few resources to use and add to. + + \subsubsection{Valence Aware Dictionary and sEntiment Reasoning}\label{Vader} + VADER is a combined lexicon and rule-based sentiment analysis tool that is specifically attuned to sentiments expressed in social media, and works well on texts from other domains. It is capable of detecting the polarity of a given text - positivity, neutrality, and negativity \cite{10}. VADER uses a human-centric approach to sentiment analysis, combining qualitative analysis and empirical validation by using human raters to rate the level of sentiment for words in its lexicon. 
VADER also has emoticon support, mapping these colloquialisms to pre-defined intensities in its lexicon, which makes VADER specifically suitable for the social media domain where the use of emoticons, UTF-8 emojis and slang such as "Lol" and "Yolo" are prevalent within text. Additionally, VADER is provided as a lexicon and a Python library under the MIT license, which means that it is open-source software. This means that the lexicon can be altered and added to, making it able to be tailored to specific topic domains. + + VADER was constructed by examining and extracting features from three pre-existing well-established and human-validated sentiment lexicons \cite{10} - (LIWC) Linguistic Inquiry and Word Count, (ANEW) Affective Norms for English Words, and (GI) General Inquirer. This is supplemented with additional lexicon features \textit{"commonly used to express sentiment in social media text (emoticons, acronyms and slang)"} \cite{10} and uses a "wisdom-of-the-crowd" approach \cite{11} to establish a point estimate of sentiment valence for each lexical feature candidate. This was evaluated for the impact of grammatical and syntactical rules and 7,500+ lexical features, with mean valence \textit{"<> zero, and SD <= 2.5"} as a human-validated "gold-standard" sentiment lexicon. \cite{10}\textit{Section 3.1} + + VADER is seen as a "Gold Standard" for sentiment analysis; in the paper for VADER, \cite{10} \textit{A Parsimonious Rule-based Model for Sentiment Analysis of Social Media Text}, it was compared against 11 other \textit{"highly regarded sentiment analysis tools/techniques on a corpus of over 4.2K tweets"} for polarity classification across 4 domains. Results showed VADER, across social media text, Amazon reviews, movie reviews and newspaper editorials, consistently outperforming other sentiment analysis tools and techniques, showing a particular trend of performing significantly higher on analysis of sentiment in tweets. 
\cite{10} \textit{Section 4: Results} \subsection{Neural Networks}\label{networks} - \subsubsection{Types}\label{types} - \subsubsection{LSTMs}\label{lstms} + A neural network is a set of perceptrons modelled loosely after the human brain that is designed to recognise patterns in whatever domain it is intended to be trained on. A neural network can consist of multiple machine perceptrons or clustering layers in a large mesh network, and the patterns they recognise are numerical and contained in vectors. Pre-processed data, confined and processed into pre-defined vector labels, are used to teach a neural network for a given task. This differs from how an algorithm is coded for a particular task; neural networks cannot be programmed directly for the task. The requirement is for them to learn from the information by use of different learning strategies; \cite{12}\cite{13} + + \begin{center} + \includegraphics[width=10cm,height=6cm]{images/perceptron.png} + \newline + \textit{Figure 1: Basic perceptron layout} + \end{center} + + \begin{itemize} + \item Supervised learning: The simplest of the learning forms, where a dataset has been labelled to indicate the correctly classified data. The input data is learned upon until the desired result of the label is reached \cite{14} + \item Unsupervised learning: Is training with a dataset without labels to learn from. The neural network analyses the dataset with a cost function which tells the neural network how far off target a prediction was. The neural network then adjusts input weights in an attempt to increase accuracy. \cite{13} + \item Reinforcement learning: The neural network is reinforced with positive results and punished for negative results, causing the network to learn over iterations. 
+ \end{itemize} + + \subsubsection{Recurrent Neural Network (RNN)}\label{types} + The type of neural network that is the focus of this project will be that of a Long-Short Term Memory (LSTM); however, it is important to understand how this is an extension of a Recurrent Neural Network (RNN) and how the underlying network works. + + Recurrent Neural Networks (RNN) are a robust and powerful type of neural network and are considered to be among the most promising algorithms for use in classification, due to having internal memory. RNNs are designed to recognise patterns in sequences of presented data or, most suitably, time-series data, genomes, handwriting and stock market data. Although RNNs were conceptualised and invented back in the 1980s \cite{15}, they've only really shown their potential in recent years, with the increase of computational power required for the level of sequencing and internal memory storage needed to retrain. + Due to this 'internal' memory loop, RNNs are able to remember data and adjust neurons based on failures and alternating parameters. To understand the way this is accomplished, how a standard neural network, such as a feed-forward network, works should initially be understood. \cite{16} + + A standard, feed-forward neural network has a single data flow with an input layer, through hidden computational layers, to an output layer. Therefore any node in the network will never see the same data again. However, in an RNN data is cycled through a loop over the same node, thus there are two inputs into the perceptron. Decisions are influenced by data that it has previously learned from, if any, which in turn affects output and the weights of the network. 
\cite{17} + + \begin{center} + \includegraphics[width=15cm,height=6cm]{images/rnn_ffn.png} + \newline + \textit{Figure 2: Feed-forward network (left) vs Recurrent Neural network (right)} + \end{center} + + The act of tweaking weights to alter the processing of the next iteration of data in an RNN is called backpropagation, which in short means going back through the network to find the partial derivatives of the error with respect to the weights after output has occurred. This works along with gradient descent, an algorithm that adjusts the weights up or down depending on which would reduce the error. There are, however, a few obstacles with RNNs; + + \begin{itemize} + \item Exploding Gradients: Is when gradient descent assigns high importance to the weights. That is, the algorithm assigns a ridiculously high or low value for the weights on iteration, which can cause overflow and result in NaN values \cite{18} + \item Vanishing Gradients: Is when the values of a gradient are small enough that weights cannot be altered and the model stops learning. \cite{19} + \end{itemize} + + These issues are overcome by the concept of Long-Short Term Memory neural networks, coined by \textit{Sepp Hochreiter and Juergen Schmidhuber, 1997} \cite{20}. + + \subsubsection{Long-Short Term Memory (LSTM)}\label{lstms} + LSTMs are an extension of recurrent neural networks capable of learning long-term dependencies and were conceptualised by \textit{Sepp Hochreiter and Juergen Schmidhuber, 1997} \cite{20}. LSTMs were explicitly designed to avoid long-term dependency problems such as exploding and vanishing gradients. As they are an extension of RNNs they operate in almost the exact same manner, but store the actual gradients and weights in memory, which allows LSTMs to read, write and alter the values. 
A way of explaining how this works is seeing the memory block as a gated cell, where 'gated' means that the cell decides whether or not to store or alter data in its memory based on input data and the importance assigned to it. In a sense it learns over time which values and data are important. + + \begin{center} + \includegraphics[width=10cm,height=8cm]{images/lstm.png} + \newline + \textit{Figure 3: A conceptual design of an LSTM cell bank - from Medium article by Shi Yan: Understanding LSTM and its diagrams}\cite{21} + \end{center} + + The network takes in three initial inputs: the input of the current time step, the output from the previous LSTM unit if any, and the memory of the previous unit. It outputs $H_t$ - the output of the current network, and $C_t$ - the memory of the current unit. \cite{21} + + The various steps of the network decide what information is thrown away from the cell state, through use of a 'forget gate' which is influenced by the calculations of sigmoid memory gates which influence how much of old and new memory is used %$C_t_\--1$,UNCOMMENT %$H_t_\--1$% + and $X_t$, and merged together based upon importance. The section of the cell that controls the outflow memory $H_t$ and $C_t$ determines how much of the new memory should be used by the next LSTM unit. + \textit{For a more detailed explanation of exactly how the calculations are made see} \cite{20},\cite{21} and \cite{22}. + + As mentioned in the foremost section of related work on LSTM networks, the use of one would be optimal for the given problem domain over the use of machine learning algorithms. 
+ + \subsubsection{Keras and TensorFlow} + + \subsubsection{Optimisers} + ADAgrad optimizer, RMSprop, Adam + + \subsubsection{Loss} + + \subsubsection{Regularisation - Dropouts} \subsection{Machine Learning}\label{machine} - \subsubsection{Logistical Regression} + \subsubsection{Naive Bayes} + + \subsection{Other Techniques} + \subsubsection{Ngrams} + \subsubsection{Scoring and Validation} + F-score, Precision, Accuracy, Validation Datasets, Cross-Validation \newpage @@ -156,6 +310,8 @@ \subsection{Solution Summary}\label{sumary} + A system will be created that will utilise + \subsection{Data flow Overview}\label{data-flow} \subsection{Packages, Tools and Techniques}\label{tools} @@ -166,6 +322,8 @@ \section{System Design and Implementation}\label{implementation} \end{center} \subsection{Data collection}\label{collection} + \subsubsection{Price Time-series Data} + Historical data of Bitcoin prices can be obtained through many means, \subsection{Data processing}\label{processing} \subsubsection{Preprocessing} @@ -176,13 +334,15 @@ \subsubsection{Spam Filtering} \paragraph{Tweet Processing} \paragraph{Naive Bayes model} - \paragraph{Multinomial Naive Bayes} - \paragraph{Bernoullis Naive Bayes} - \paragraph{Gaussuan Naive Bayes} \subsection{Sentiment Analysis} \subsubsection{VADER} + \subsection{Recurrent Neural Network} + \subsubsection{Training and Testing Model} + \subsubsection{Validation} + \subsubsection{Future Prediction Forecasting} + \newpage \section{Testing: Verification and Reflection} @@ -192,19 +352,16 @@ \section{Discussion: Contribution and Reflection} \subsection{Limitations} - \newpage - - \section{Social, Legal and Ethical Issues} \newpage \section{Conclusion and Future Improvements} \subsection{Conclusion} \subsection{Future Improvements} + Shifting the initial data by an hour and sequencing over previous data \newpage - \section{References} \nocite{*} \printbibliography diff --git a/document.toc b/document.toc index 4e28168..f1dd7e0 100644 --- 
a/document.toc +++ b/document.toc @@ -20,82 +20,100 @@ \defcounter {refsection}{0}\relax \contentsline {subsection}{Existing Tools}{9}{section*.11} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Related Work}{9}{section*.12} +\contentsline {subsection}{Related research}{9}{section*.12} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Tweet Collection}{9}{section*.13} +\contentsline {subsection}{Data Collection}{10}{section*.13} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Sentiment Analysis}{9}{section*.14} +\contentsline {subsubsection}{Twitter and Twitter API}{10}{section*.14} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Algorithms}{9}{section*.15} +\contentsline {subsubsection}{Tweepy Python Package}{11}{section*.15} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Techniques}{9}{section*.16} +\contentsline {subsection}{Sentiment Analysis}{11}{section*.16} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Neural Networks}{9}{section*.17} +\contentsline {subsubsection}{Natural Language Processing}{11}{section*.17} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Types}{9}{section*.18} +\contentsline {subsubsection}{Valence Aware Dictionary and sEntiment Reasoning}{12}{section*.18} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{LSTMs}{9}{section*.19} +\contentsline {subsection}{Neural Networks}{12}{section*.19} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Machine Learning}{9}{section*.20} +\contentsline {subsubsection}{Recurrent Neural Network (RNN)}{13}{section*.20} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Logistical Regression}{9}{section*.21} +\contentsline {subsubsection}{Long-Short Term Memory (LSTM)}{14}{section*.21} \defcounter {refsection}{0}\relax -\contentsline {section}{Solution Approach}{10}{section*.22} +\contentsline {subsubsection}{Kera and TensorFlow}{15}{section*.22} \defcounter 
{refsection}{0}\relax -\contentsline {subsection}{Solution Summary}{10}{section*.23} +\contentsline {subsubsection}{Optimisers}{15}{section*.23} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Data flow Overview}{10}{section*.24} +\contentsline {subsubsection}{Loss}{16}{section*.24} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Packages, Tools and Techniques}{10}{section*.25} +\contentsline {subsubsection}{Regularisation - Dropouts}{16}{section*.25} \defcounter {refsection}{0}\relax -\contentsline {section}{System Design and Implementation}{11}{section*.26} +\contentsline {subsection}{Machine Learning}{16}{section*.26} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Data collection}{11}{section*.27} +\contentsline {subsubsection}{Naive Bayes}{16}{section*.27} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Data processing}{11}{section*.28} +\contentsline {subsection}{Other Techniques}{16}{section*.28} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Preprocessing}{11}{section*.29} +\contentsline {subsubsection}{Ngrams}{16}{section*.29} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Tweet Filtering}{11}{section*.30} +\contentsline {subsubsection}{Scoring and Validation}{16}{section*.30} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Text Cleaning}{11}{section*.31} +\contentsline {section}{Solution Approach}{17}{section*.31} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Ngram based Language detection filtering}{11}{section*.32} +\contentsline {subsection}{Solution Summary}{17}{section*.32} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{Spam Filtering}{11}{section*.33} +\contentsline {subsection}{Data flow Overview}{17}{section*.33} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Tweet Processing}{11}{section*.34} +\contentsline {subsection}{Packages, Tools and Techniques}{17}{section*.34} \defcounter {refsection}{0}\relax 
-\contentsline {paragraph}{Naive Bayes model}{11}{section*.35} +\contentsline {section}{System Design and Implementation}{18}{section*.35} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Multinomial Naive Bayes}{11}{section*.36} +\contentsline {subsection}{Data collection}{18}{section*.36} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Bernoullis Naive Bayes}{11}{section*.37} +\contentsline {subsubsection}{Price Time-series Data}{18}{section*.37} \defcounter {refsection}{0}\relax -\contentsline {paragraph}{Gaussuan Naive Bayes}{11}{section*.38} +\contentsline {subsection}{Data processing}{18}{section*.38} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Sentiment Analysis}{11}{section*.39} +\contentsline {subsubsection}{Preprocessing}{18}{section*.39} \defcounter {refsection}{0}\relax -\contentsline {subsubsection}{VADER}{11}{section*.40} +\contentsline {paragraph}{Tweet Filtering}{18}{section*.40} \defcounter {refsection}{0}\relax -\contentsline {section}{Testing: Verification and Reflection}{12}{section*.41} +\contentsline {paragraph}{Text Cleaning}{18}{section*.41} \defcounter {refsection}{0}\relax -\contentsline {section}{Discussion: Contribution and Reflection}{13}{section*.42} +\contentsline {paragraph}{Ngram based Language detection filtering}{18}{section*.42} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Limitations}{13}{section*.43} +\contentsline {subsubsection}{Spam Filtering}{18}{section*.43} \defcounter {refsection}{0}\relax -\contentsline {section}{Social, Legal and Ethical Issues}{14}{section*.44} +\contentsline {paragraph}{Tweet Processing}{18}{section*.44} \defcounter {refsection}{0}\relax -\contentsline {section}{Conclusion and Future Improvements}{15}{section*.45} +\contentsline {paragraph}{Naive Bayes model}{18}{section*.45} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Conclusion}{15}{section*.46} +\contentsline {subsection}{Sentiment Analysis}{18}{section*.46} \defcounter 
{refsection}{0}\relax -\contentsline {subsection}{Future Improvements}{15}{section*.47} +\contentsline {subsubsection}{VADER}{18}{section*.47} \defcounter {refsection}{0}\relax -\contentsline {section}{References}{16}{section*.48} +\contentsline {subsection}{Recurrent Neural Network}{18}{section*.48} \defcounter {refsection}{0}\relax -\contentsline {section}{Appendices}{17}{section*.50} +\contentsline {subsubsection}{Training and Testing Model}{18}{section*.49} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Appendix A - Project Initiation Document}{17}{section*.51} +\contentsline {subsubsection}{Validation}{18}{section*.50} \defcounter {refsection}{0}\relax -\contentsline {subsection}{Appendix B - Log book}{30}{section*.52} +\contentsline {subsubsection}{Future Prediction Forecasting}{18}{section*.51} +\defcounter {refsection}{0}\relax +\contentsline {section}{Testing: Verification and Reflection}{19}{section*.52} +\defcounter {refsection}{0}\relax +\contentsline {section}{Discussion: Contribution and Reflection}{20}{section*.53} +\defcounter {refsection}{0}\relax +\contentsline {subsection}{Limitations}{20}{section*.54} +\defcounter {refsection}{0}\relax +\contentsline {section}{Conclusion and Future Improvements}{21}{section*.55} +\defcounter {refsection}{0}\relax +\contentsline {subsection}{Conclusion}{21}{section*.56} +\defcounter {refsection}{0}\relax +\contentsline {subsection}{Future Improvements}{21}{section*.57} +\defcounter {refsection}{0}\relax +\contentsline {section}{Appendices}{24}{section*.59} +\defcounter {refsection}{0}\relax +\contentsline {subsection}{Appendix A - Project Initiation Document}{24}{section*.60} +\defcounter {refsection}{0}\relax +\contentsline {subsection}{Appendix B - Log book}{37}{section*.61} diff --git a/images/gru.png b/images/gru.png new file mode 100644 index 0000000..6838a20 Binary files /dev/null and b/images/gru.png differ diff --git a/images/lstm.png b/images/lstm.png new file mode 100644 index 
0000000..b76c309 Binary files /dev/null and b/images/lstm.png differ diff --git a/images/perceptron.png b/images/perceptron.png new file mode 100644 index 0000000..2e8613d Binary files /dev/null and b/images/perceptron.png differ diff --git a/images/reading_logo.png b/images/reading_logo.png new file mode 100644 index 0000000..f1e5889 Binary files /dev/null and b/images/reading_logo.png differ diff --git a/images/rnn_ffn.png b/images/rnn_ffn.png new file mode 100644 index 0000000..3f0fb18 Binary files /dev/null and b/images/rnn_ffn.png differ diff --git a/report.bib b/report.bib index 4e2466f..c5c279f 100644 --- a/report.bib +++ b/report.bib @@ -6,4 +6,214 @@ year={2016}, organization={IEEE}, url = {https://arxiv.org/pdf/1610.09225.pdf} +} + +@inproceedings{2, + title={Predicting Bitcoin price fluctuation with Twitter sentiment analysis}, + author={Evita Stenqvist, Jacob Lonno}, + booktitle={}, + pages={}, + year={2017}, + organization={Diva}, + url = {http://www.diva-portal.org/smash/get/diva2:1110776/FULLTEXT01.pdf} +} + +@inproceedings{3, + title={Stock Predictions through News Sentiment Analysis}, + author={Intel-Corporation}, + booktitle={}, + pages={}, + year={2017}, + organization={Code Project}, + url = {https://www.codeproject.com/Articles/1201444/Stock-Predictions-through-News-Sentiment-Analysis} +} + +@inproceedings{4, + title={Predicting the Price of Bitcoin Using Machine Learning}, + author={Sean McNally, Jason Roche, Simon Caton}, + booktitle={2018 26th Euromicro International Conference on Parallel, Distributed and Network-based Processing (PDP)}, + pages={344--347}, + year={2018}, + organization={IEEE}, + url = {https://ieeexplore.ieee.org/abstract/document/8374483} +} + +@inproceedings{5, + title={Search Tweets}, + author={Twitter}, + booktitle={}, + pages={}, + year={2018}, + organization={Twitter Developers}, + url = {https://developer.twitter.com/en/docs/tweets/search/overview} +} + +@inproceedings{6, + title={Consuming streaming data}, + 
author={Twitter}, + booktitle={}, + pages={}, + year={2018}, + organization={Twitter Developers}, + url = {https://developer.twitter.com/en/docs/tutorials/consuming-streaming-data.html} +} + +@inproceedings{7, + title={Streaming With Tweepy}, + author={Joshua Roesslein}, + booktitle={}, + pages={}, + year={2009}, + organization={Tweepy}, + url = {http://docs.tweepy.org/en/v3.4.0/streaming_how_to.html} +} + +@inproceedings{8, + title={Using Linked Data for polarity classification of patients experiences}, + author={Mehrnoush Shamsfard, Samira Noferesti}, + booktitle={Journal of Biomedical Informatics}, + pages={6-19}, + year={2015}, + organization={Elsevier}, + url = {https://www.sciencedirect.com/science/article/pii/S1532046415001276} +} + +@inproceedings{9, + title={Social media sentiment analysis: lexicon versus machine learning}, + author={Chedia Dhaoui, Cynthia M. Webster, Lay Peng Tan}, + booktitle={Journal of Consumer Marketing, Volume 34. Issue 6}, + pages={}, + year={2017}, + organization={Emerald Insight}, + url = {https://www.emeraldinsight.com/doi/pdfplus/10.1108/JCM-03-2017-2141} +} + +@inproceedings{10, + title={VADER: A Parsimonious Rule-based Model for Sentiment Analysis of Social Media Text}, + author={C.J. Hutto and Eric Gilbert}, + booktitle={Eighth International Conference on Weblogs and Social Media (ICWSM-14)}, + pages={}, + year={2014}, + organization={Ann Arbor, MI}, + url = {https://www.aaai.org/ocs/index.php/ICWSM/ICWSM14/paper/download/8109/8122} +} + +@inproceedings{11, + title={Wisdom of Crowds}, + author={Will Kenton}, + booktitle={}, + pages={}, + year={2018}, + organization={Investopedia}, + url = {https://www.investopedia.com/terms/w/wisdom-crowds.asp} +} + +@inproceedings{12, + title={A Beginner's Guide to Neural Networks and Deep Learning}, + author={Skymind}, + booktitle={A.I. 
Wiki}, + pages={}, + year={2018}, + organization={Skymind}, + url = {https://skymind.ai/wiki/neural-network} +} + +@inproceedings{13, + title={What is a neural network}, + author={Jonas DeMuro}, + booktitle={World of tech}, + pages={}, + year={2018}, + organization={techradar}, + url = {https://www.techradar.com/uk/news/what-is-a-neural-network} +} + +@inproceedings{14, + title={Supervised dictionary learning}, + author={Mairal, J., Ponce, J., Sapiro, G., Zisserman, A. and Bach, F.R., }, + booktitle={Advances in neural information processing systems }, + pages={1033--1040}, + year={2009}, + organization={NIPS Proceedings}, + url = {http://papers.nips.cc/paper/3448-supervised-dictionary-learning} +} + +@inproceedings{15, + title={Learning internal representations by error propagation}, + author={Rumelhart, David E and Hinton, Geoffrey E and Williams, Ronald J}, + booktitle={}, + pages={}, + year={1985}, + organization={California Univ San Diego La Jolla Inst for Cognitive Science}, + url = {https://apps.dtic.mil/docs/citations/ADA164453} +} + +@inproceedings{16, + title={A Beginner's Guide to LSTMs and Recurrent Neural Networks}, + author={Skymind}, + booktitle={A.I. 
Wiki}, + pages={}, + year={2018}, + organization={Skymind}, + url = {https://skymind.ai/wiki/lstm} +} + +@inproceedings{17, + title={Recurrent Neural Networks and LSTM}, + author={Niklas Donges}, + booktitle={}, + pages={}, + year={2018}, + organization={Towards Data Science}, + url = {https://towardsdatascience.com/recurrent-neural-networks-and-lstm-4b601dd822a5} +} + +@inproceedings{18, + title={A Gentle Introduction to Exploding Gradients in Neural Networks}, + author={Jason Brownlee, PhD.}, + booktitle={}, + pages={}, + year={2017}, + organization={Machine Learning Mastery}, + url = {https://machinelearningmastery.com/exploding-gradients-in-neural-networks/} +} + +@inproceedings{19, + title={Recurrent Neural Networks (RNN) - The Vanishing Gradient Problem}, + author={Super Data Science Team}, + booktitle={}, + pages={}, + year={2018}, + organization={Super Data Science}, + url = {https://www.superdatascience.com/blogs/recurrent-neural-networks-rnn-the-vanishing-gradient-problem} +} + +@inproceedings{20, + title={Long short-term memory}, + author={Hochreiter, Sepp and Schmidhuber, Jurgen}, + booktitle={Neural computation, Volume 9. 8}, + pages={1735--1780}, + year={1997}, + organization={MIT Press}, + url = {https://www.bioinf.jku.at/publications/older/2604.pdf} +} + +@inproceedings{21, + title={Understanding LSTM and its diagrams}, + author={Shi Yan}, + booktitle={}, + pages={}, + year={2016}, + organization={Medium}, + url = {https://medium.com/mlreview/understanding-lstm-and-its-diagrams-37e2f46f1714} +} + +@inproceedings{22, + title={Understanding LSTM Networks}, + author={Christopher Olah}, + booktitle={}, + pages={}, + year={2015}, + organization={}, + url = {https://colah.github.io/posts/2015-08-Understanding-LSTMs} } \ No newline at end of file