diff --git a/_bibliography.bib b/_bibliography.bib
index 9515a62eb168f2d2029c7bf632480b108f9c30df..de51cef8a3000c93245f6d054e4d67eb4090b7c3 100644
--- a/_bibliography.bib
+++ b/_bibliography.bib
@@ -105,3 +105,13 @@ isbn="978-3-540-36127-5"
   journal={arXiv preprint arXiv:2212.03533},
   year={2022}
 }
+
+
+@misc{mixtral7b,
+  title = {mixtral-instruct-8x7b-quantized-gguf},
+  howpublished = {\url{https://huggingface.co/ikawrakow/mixtral-instruct-8x7b-quantized-gguf}},
+  note = {Accessed: 2024-07-19}
+}
+
+
+
diff --git a/appendix.aux b/appendix.aux
index 7c2acb30c74641f7d1f4ff5febb5b97fe952c162..9f992cc5aacafa4cb397f93de06e633def44f39d 100644
--- a/appendix.aux
+++ b/appendix.aux
@@ -1,11 +1,11 @@
 \relax 
 \providecommand\hyper@newdestlabel[2]{}
-\@writefile{toc}{\contentsline {chapter}{\numberline {7}Appendix}{25}{chapter.7}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {7}Appendix}{27}{chapter.7}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
 \@setckpt{appendix}{
-\setcounter{page}{26}
+\setcounter{page}{28}
 \setcounter{equation}{0}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
@@ -27,7 +27,7 @@
 \setcounter{Item}{0}
 \setcounter{Hfootnote}{0}
 \setcounter{bookmark@seq@number}{0}
-\setcounter{caption@flags}{6}
+\setcounter{caption@flags}{2}
 \setcounter{continuedfloat}{0}
 \setcounter{subfigure}{0}
 \setcounter{subtable}{0}
@@ -40,7 +40,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{8}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/conclusion.aux b/conclusion.aux
index a996f970ed214edfb51a2bdcbca0f06f89ec61d0..e38618640026cf53cf71884cb7edef4cfa3d622a 100644
--- a/conclusion.aux
+++ b/conclusion.aux
@@ -1,14 +1,14 @@
 \relax 
 \providecommand\hyper@newdestlabel[2]{}
-\@writefile{toc}{\contentsline {chapter}{\numberline {6}Conclusion}{21}{chapter.6}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {6}Conclusion}{25}{chapter.6}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
-\@writefile{toc}{\contentsline {section}{\numberline {6.1}summary}{21}{section.6.1}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {6.2}future work}{21}{section.6.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {6.3}note of thanks}{21}{section.6.3}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {6.1}summary}{25}{section.6.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {6.2}future work}{25}{section.6.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {6.3}note of thanks}{25}{section.6.3}\protected@file@percent }
 \@setckpt{conclusion}{
-\setcounter{page}{22}
+\setcounter{page}{26}
 \setcounter{equation}{0}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
@@ -30,7 +30,7 @@
 \setcounter{Item}{0}
 \setcounter{Hfootnote}{0}
 \setcounter{bookmark@seq@number}{0}
-\setcounter{caption@flags}{6}
+\setcounter{caption@flags}{2}
 \setcounter{continuedfloat}{0}
 \setcounter{subfigure}{0}
 \setcounter{subtable}{0}
@@ -43,7 +43,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{4}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/conclusion.tex b/conclusion.tex
index 5ab2213deaf2e6faffb9acb89bca0e705ae9dda1..b3ed177ef6bfd0ccca2b08b0dd231107f73603d4 100644
--- a/conclusion.tex
+++ b/conclusion.tex
@@ -2,5 +2,19 @@
 
 \section{summary}
 \section{future work}
+For me, what first comes to mind regarding future work is the extensive and often tedious effort I had to put into scraping individual PDFs. Every author structures their book differently, with different placements for page numbers, chapter numbers and chapter names, which makes it really hard to find one approach that scrapes them all efficiently. But plentiful and clean data is crucial when it forms the basis for almost everything that follows. Therefore, I would advocate expanding the amount of literature and characters and further improving on the already carefully filtered data.
+There is a second part to the mentioned data filtering which also leaves room for improvement: the human-written fandom articles from fandom.com. Since some of them are very long and contain a great amount of detail, I had to cut them down to a specific paragraph. % TODO: consider using the whole articles instead of truncating them?
+
Another avenue for future work would be the use of additional evaluation methods. % TODO: expand on which evaluation methods
+
Since LLMs will continue to improve and might also increase their maximum input size to a point where a single book easily fits into the context window, the task might shift to % TODO: finish this sentence
+
 \section{note of thanks}
-\textit{Here I will describe future work that can be done to improve my work and talk about the advantages, disadvantages of the methods I used but also other problems that occured during the time (e.g. Iack of literatur) that I was able to obtain}
\ No newline at end of file
+I would like to extend my heartfelt gratitude to all those who have knowingly or unknowingly supported me throughout the completion of this master's thesis.
+
+First and foremost, to my advisor, Hans Ole Hatzel, for his unwavering guidance, invaluable insights, and constant encouragement. Your expertise and patience have been instrumental in shaping this research.
+
+I would also like to thank my family and friends, whose love, support, and understanding throughout this semester provided me with both motivation and inspiration.
+
+
+Finally, I am grateful to all those, named and unnamed, who have directly or indirectly contributed to this thesis. Your support has been invaluable, and I am deeply thankful for your contributions.
diff --git a/dataset.aux b/dataset.aux
index 5381991ae9a015c880fd25347170df930a864269..7d7147603c239a5c5c60735df7fc5177c90fd10a 100644
--- a/dataset.aux
+++ b/dataset.aux
@@ -1,14 +1,14 @@
 \relax 
 \providecommand\hyper@newdestlabel[2]{}
-\@writefile{toc}{\contentsline {chapter}{\numberline {4}Gathering of literature}{11}{chapter.4}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {4}Gathering of literature}{13}{chapter.4}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
-\@writefile{lot}{\contentsline {table}{\numberline {4.1}{\ignorespaces dataset of characters and their descriptions}}{12}{table.caption.3}\protected@file@percent }
+\@writefile{lot}{\contentsline {table}{\numberline {4.1}{\ignorespaces dataset of characters and their descriptions}}{14}{table.caption.4}\protected@file@percent }
 \providecommand*\caption@xref[2]{\@setref\relax\@undefined{#1}}
-\newlabel{tab:example_table}{{4.1}{12}{dataset of characters and their descriptions}{table.caption.3}{}}
+\newlabel{tab:example_table}{{4.1}{14}{dataset of characters and their descriptions}{table.caption.4}{}}
 \@setckpt{dataset}{
-\setcounter{page}{13}
+\setcounter{page}{15}
 \setcounter{equation}{0}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
@@ -43,7 +43,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{4}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/dataset.tex b/dataset.tex
index a0f8b41ab1b5665a0b4f09bd4f7f23b5c24c0772..dd1502db60aa4d050a51f946082a58086fddaf48 100644
--- a/dataset.tex
+++ b/dataset.tex
@@ -1,16 +1,13 @@
 
 \chapter{Gathering of literature}
 Unfortunately, there's barely any open-source collection of literature with 
-characterizations available. Examples like "Romeo and Juliet," "Moby Dick," "Frankenstein," or "Alice's Adventures in Wonderland" are rare cases where enough fandom exists to create accessible and reviewed content. In most other instances, it seems too risky to use open-source literature, as these collections predominantly consist of less popular books with minimal fanbase and related content. Popular literature, with its larger online presence, results in more detailed and reviewed community-generated content, such as characterizations and summaries, which are valuable as reference points for my generated characterizations.\\
+characterizations available. Examples like ``Romeo and Juliet'', ``Moby Dick'', ``Frankenstein'' or ``Alice's Adventures in Wonderland'' are rare cases where enough fandom exists to create accessible and reviewed content. In most other instances, it seems too risky to use open-source literature, as these collections predominantly consist of less popular books with minimal fanbase and related content. Popular literature, with its larger online presence, results in more detailed and reviewed community-generated content, such as characterizations and summaries, which are valuable as reference points for my generated characterizations. This is why I will mostly rely on non-open-source literature for this thesis.\\
 
-All of the books contained text decorations and structural elements such as chapters, sections, and page numbers, which remained present after converting the PDFs and text files and loading them into memory. These elements had to be manually filtered out before further processing, as they interfered with some of the techniques applied later.\\
+During the process of using Wikidata, a free and open knowledge database, to query characters and filter personal descriptions from books, I discovered that many of these descriptions contain references to articles from fandom.com, the world's most popular open-source wiki platform for fan-related content. Initially, I planned to query Wikidata for all characters linked to Fandom articles to gather literature with the most comprehensive fandom articles. However, I realized that not all character descriptions in Wikidata include Fandom article links. Some character descriptions are missing Fandom article URLs, making it insufficient to rely solely on Wikidata for content. Additionally, there are instances of multiple articles linked to one character. Some articles are in different languages, while others are older versions or from different universes within the same saga. In most cases, I chose to use the newest, longest English version, but this was not always possible. For example, when fetching Dune character fandom articles, I had to manually sort out some characters. The Dune fandom includes characters from the ``Dune Encyclopedia'' and ``Expanded Dune'', as well as from the original ``Dune'' by Frank Herbert. This overlap made it problematic to compare information about the same character in different contexts, especially when relevant information might not be available across all contexts.\\
 
-During the process of using Wikidata, a free and open knowledge database, to query characters and filter personal descriptions from books, I discovered that many of these descriptions contain references to articles from Fandom.com, the world's most popular open-source wiki platform for fan-related content.\\
+In the end, I used multiple methods. First, I queried Wikidata to quickly obtain a large number of characters, then manually deleted duplicates and added additional characters with URLs by hand. The fetched articles varied significantly in length, requiring me to cut them down so they were roughly the same size. I achieved this by sequentially removing paragraphs from the bottom of each original article until they reached the desired length.
 
-Initially, I planned to query Wikidata for all characters linked to Fandom articles to gather literature with the most comprehensive fandom articles. However, I realized that not all character descriptions in Wikidata include Fandom article links. Some character descriptions are missing Fandom article URLs, making it insufficient to rely solely on Wikidata for content. Additionally, there are instances of multiple articles linked to one character. Some articles are in different languages, while others are older versions or from different universes within the same saga. In most cases, I was able to chose to use the newest, longest English version but this was not always possible. For example, when fetching Dune character fandom articles, I had to manually sort out some characters. The Dune fandom includes characters from the "Dune Encyclopedia" and "Expanded Dune," as well as from the original "Dune" by Frank Herbert. This overlap made it problematic to compare information about the same character in different contexts, especially when relevant information might not be available across all contexts.\\
-
-
-So in the end, I used multiple methods. First, I queried Wikidata to quickly obtain a large number of characters, then manually deleted duplicates and added additional characters with URLs by hand. Since readers of this thesis might not have access to all the non-open-source literature I used, I aimed to minimize the number of sources to make the results easier to replicate and verify. Ultimately, I was able to obtain character data for 800 characters from eight books in total. The results are linked in the appendix.
+Since readers of this thesis might not have access to all the non-open-source literature I used, I aimed to minimize the number of sources to make the results easier to replicate and verify. Ultimately, I was able to obtain character data for 534 characters from eight books in total. The results are linked in the appendix. All of the books contained text decorations and structural elements such as chapters, sections, and page numbers, which remained present after converting the PDFs and text files and loading them into memory. These elements had to be manually filtered out using regular expressions before further processing.\\
 
 
 
@@ -41,11 +38,6 @@ So in the end, I used multiple methods. First, I queried Wikidata to quickly obt
   \caption{dataset of characters and their descriptions}
   \label{tab:example_table}
   \end{table}
-...\\
-
-
-Project Gutenberg and the Tell Me Again! Dataset...
-
 
 
 
diff --git a/evaluation.tex b/evaluation.tex
deleted file mode 100644
index ec9f8b34933c5d592c92a254adf7e2c89fc91334..0000000000000000000000000000000000000000
--- a/evaluation.tex
+++ /dev/null
@@ -1,9 +0,0 @@
-\chapter{Evaluation}
-As we can see the generated result doesn't contain any wrong information but is also missing a lot of details. This might be due to already mentioned missing context as well as to gpt discarding seemingly irrelevant information.
-His old friends Remus Lupin and Sirius Black aren't mentioned at all and there is no info about what Sirius had to go through in consequence of Pettigrew accusing Sirius for the murder.
-Although I asked gpt to only create a summary based on the given sentences, "Hermione" is mentioned in the summary which is the english name of the character Hermine. So GPT isn't fully depending on the sentences I provided for this characterization.
-I utilized both the BLUE-Metric and BERT-Score to assess disparities, employing the well-known character Peter Pettigrew and the less familiar character Bathilda Bagshot and Angelina Johnson. For all three of these characters i generated characterizations. One with and one without textpassages from the book. The initial set of results, depicted in the first figure, illustrates the language model's performance solely based on its training data, devoid of any text passages revealing character information. The second figure presents findings from the subsequent test, where I incorporated all book sentences containing the character's name into the GPT query.\\
-The results showcased in the second figure are only marginally superior for Bathilda and Pattigrew to those relying exclusively on training data.
-I anticipated Angelina's and Bathilda's score in the initial figure to be rather low, given her limited popularity, stemming from a solitary mention in the final book. However, ChatGPT demonstrated a more profound understanding of the books than I had initially assumed.
-It is crucial to conduct further assessments with books beyond the language model's training set. This can be achieved by using recently published books to validate the impact and independence of additional information on the model's performance.
-\cite{post-2018-call,bert-score}
diff --git a/experiments.aux b/experiments.aux
index 01e4f95a102f2b3c217866a51fb1aef0a3e539e8..2b18d1571a2b6a318a3dbc2e6ea04b69ed83c156 100644
--- a/experiments.aux
+++ b/experiments.aux
@@ -1,38 +1,53 @@
 \relax 
 \providecommand\hyper@newdestlabel[2]{}
-\@writefile{toc}{\contentsline {chapter}{\numberline {5}Experiments}{13}{chapter.5}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {5}Experiments}{15}{chapter.5}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
-\@writefile{toc}{\contentsline {section}{\numberline {5.1}Base Experiment}{13}{section.5.1}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {5.1.1}Results}{16}{subsection.5.1.1}\protected@file@percent }
-\newlabel{fig:prompt1}{{\caption@xref {fig:prompt1}{ on input line 298}}{16}{Results}{figure.caption.4}{}}
-\newlabel{sub@fig:prompt1}{{}{16}{Results}{figure.caption.4}{}}
-\newlabel{fig:prompt2}{{\caption@xref {fig:prompt2}{ on input line 298}}{16}{Results}{figure.caption.4}{}}
-\newlabel{sub@fig:prompt2}{{}{16}{Results}{figure.caption.4}{}}
-\newlabel{fig:prompt3}{{\caption@xref {fig:prompt3}{ on input line 298}}{16}{Results}{figure.caption.4}{}}
-\newlabel{sub@fig:prompt3}{{}{16}{Results}{figure.caption.4}{}}
-\newlabel{fig:prompt4}{{\caption@xref {fig:prompt4}{ on input line 298}}{16}{Results}{figure.caption.4}{}}
-\newlabel{sub@fig:prompt4}{{}{16}{Results}{figure.caption.4}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {5.1}{\ignorespaces Blue-Metric of Zero-Shot Characterizations generated with Llama without ($\delta $) and with passage retrieval ($\delta '$) from the literature}}{16}{figure.caption.4}\protected@file@percent }
-\newlabel{fig:enter-label}{{\caption@xref {fig:enter-label}{ on input line 406}}{17}{Results}{figure.caption.5}{}}
-\newlabel{sub@fig:enter-label}{{}{17}{Results}{figure.caption.5}{}}
-\newlabel{fig:enter-label}{{\caption@xref {fig:enter-label}{ on input line 406}}{17}{Results}{figure.caption.5}{}}
-\newlabel{sub@fig:enter-label}{{}{17}{Results}{figure.caption.5}{}}
-\newlabel{fig:enter-label}{{\caption@xref {fig:enter-label}{ on input line 406}}{17}{Results}{figure.caption.5}{}}
-\newlabel{sub@fig:enter-label}{{}{17}{Results}{figure.caption.5}{}}
-\newlabel{fig:enter-label}{{\caption@xref {fig:enter-label}{ on input line 406}}{17}{Results}{figure.caption.5}{}}
-\newlabel{sub@fig:enter-label}{{}{17}{Results}{figure.caption.5}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {5.2}{\ignorespaces BERTScore of Zero-Shot Characterizations generated with Llama without ($\delta $) and with passage retrieval ($\delta '$) from the literatur}}{17}{figure.caption.5}\protected@file@percent }
-\newlabel{fig:boxplot}{{\caption@xref {fig:boxplot}{ on input line 442}}{18}{Results}{figure.caption.6}{}}
-\newlabel{sub@fig:boxplot}{{}{18}{Results}{figure.caption.6}{}}
-\newlabel{fig:boxplot}{{\caption@xref {fig:boxplot}{ on input line 701}}{18}{Results}{figure.caption.6}{}}
-\newlabel{sub@fig:boxplot}{{}{18}{Results}{figure.caption.6}{}}
-\@writefile{lof}{\contentsline {figure}{\numberline {5.3}{\ignorespaces Boxplots of BLEU- and BERTScores for every prompt ($\delta _{1}, \delta _{1}', \delta _{2},...,\delta _{4}'$)}}{18}{figure.caption.6}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {5.1.2}Analysis}{18}{subsection.5.1.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {5.2}Selected Embedded chunks}{20}{section.5.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {5.1}Base Experiment}{15}{section.5.1}\protected@file@percent }
+\newlabel{fig:prompts2}{{\caption@xref {fig:prompts2}{ on input line 35}}{16}{Base Experiment}{figure.caption.5}{}}
+\@writefile{toc}{\contentsline {subsection}{\numberline {5.1.1}Results}{18}{subsection.5.1.1}\protected@file@percent }
+\newlabel{fig:prompt1}{{\caption@xref {fig:prompt1}{ on input line 338}}{18}{Results}{figure.caption.6}{}}
+\newlabel{sub@fig:prompt1}{{}{18}{Results}{figure.caption.6}{}}
+\newlabel{fig:prompt2}{{\caption@xref {fig:prompt2}{ on input line 338}}{18}{Results}{figure.caption.6}{}}
+\newlabel{sub@fig:prompt2}{{}{18}{Results}{figure.caption.6}{}}
+\newlabel{fig:prompt3}{{\caption@xref {fig:prompt3}{ on input line 338}}{18}{Results}{figure.caption.6}{}}
+\newlabel{sub@fig:prompt3}{{}{18}{Results}{figure.caption.6}{}}
+\newlabel{fig:prompt4}{{\caption@xref {fig:prompt4}{ on input line 338}}{18}{Results}{figure.caption.6}{}}
+\newlabel{sub@fig:prompt4}{{}{18}{Results}{figure.caption.6}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.1}{\ignorespaces Blue-Metric of Zero-Shot Characterizations generated with Llama without ($P$) and with passage retrieval ($P'$) from the literature}}{18}{figure.caption.6}\protected@file@percent }
+\newlabel{fig:prompt1}{{\caption@xref {fig:prompt1}{ on input line 623}}{19}{Results}{figure.caption.7}{}}
+\newlabel{sub@fig:prompt1}{{}{19}{Results}{figure.caption.7}{}}
+\newlabel{fig:prompt2}{{\caption@xref {fig:prompt2}{ on input line 623}}{19}{Results}{figure.caption.7}{}}
+\newlabel{sub@fig:prompt2}{{}{19}{Results}{figure.caption.7}{}}
+\newlabel{fig:prompt3}{{\caption@xref {fig:prompt3}{ on input line 623}}{19}{Results}{figure.caption.7}{}}
+\newlabel{sub@fig:prompt3}{{}{19}{Results}{figure.caption.7}{}}
+\newlabel{fig:prompt4}{{\caption@xref {fig:prompt4}{ on input line 623}}{19}{Results}{figure.caption.7}{}}
+\newlabel{sub@fig:prompt4}{{}{19}{Results}{figure.caption.7}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.2}{\ignorespaces BERTScore of Zero-Shot Characterizations generated with Llama without ($P$) and with passage retrieval ($P'$) from the literatur}}{19}{figure.caption.7}\protected@file@percent }
+\newlabel{fig:boxplot}{{\caption@xref {fig:boxplot}{ on input line 661}}{20}{Results}{figure.caption.8}{}}
+\newlabel{sub@fig:boxplot}{{}{20}{Results}{figure.caption.8}{}}
+\newlabel{fig:boxplot}{{\caption@xref {fig:boxplot}{ on input line 920}}{20}{Results}{figure.caption.8}{}}
+\newlabel{sub@fig:boxplot}{{}{20}{Results}{figure.caption.8}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.3}{\ignorespaces Boxplots of BLEU- and BERTScores for every prompt ($P_{1}, P_{1}', P_{2},...,P_{4}'$)}}{20}{figure.caption.8}\protected@file@percent }
+\newlabel{fig:prompts2}{{\caption@xref {fig:prompts2}{ on input line 943}}{20}{Results}{figure.caption.9}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.4}{\ignorespaces T-Test and spearman correlation with according p-values after prompting llama3 and gemma2 with $P_{1}$ with and without a selection of embedded text chunks for passage retrieval}}{20}{figure.caption.9}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {5.1.2}Analysis}{21}{subsection.5.1.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {5.2}Selected Embedded chunks}{22}{section.5.2}\protected@file@percent }
+\newlabel{fig:prompt1}{{\caption@xref {fig:prompt1}{ on input line 1109}}{22}{Selected Embedded chunks}{figure.caption.10}{}}
+\newlabel{sub@fig:prompt1}{{}{22}{Selected Embedded chunks}{figure.caption.10}{}}
+\newlabel{fig:prompt2}{{\caption@xref {fig:prompt2}{ on input line 1109}}{22}{Selected Embedded chunks}{figure.caption.10}{}}
+\newlabel{sub@fig:prompt2}{{}{22}{Selected Embedded chunks}{figure.caption.10}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.5}{\ignorespaces Blue-Metric of Zero-Shot Characterizations generated with Llama3 and Gemma2 without ($P$) and with passage retrieval ($P'$) from the literature}}{22}{figure.caption.10}\protected@file@percent }
+\newlabel{fig:prompt1}{{\caption@xref {fig:prompt1}{ on input line 1256}}{23}{Selected Embedded chunks}{figure.caption.11}{}}
+\newlabel{sub@fig:prompt1}{{}{23}{Selected Embedded chunks}{figure.caption.11}{}}
+\newlabel{fig:prompt2}{{\caption@xref {fig:prompt2}{ on input line 1256}}{23}{Selected Embedded chunks}{figure.caption.11}{}}
+\newlabel{sub@fig:prompt2}{{}{23}{Selected Embedded chunks}{figure.caption.11}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.6}{\ignorespaces BERTScore of Zero-Shot Characterizations generated with Llama3 and Gemma2 without ($P$) and with passage retrieval ($P'$) from the literature}}{23}{figure.caption.11}\protected@file@percent }
+\newlabel{fig:prompts2}{{\caption@xref {fig:prompts2}{ on input line 1281}}{23}{Selected Embedded chunks}{figure.caption.12}{}}
+\@writefile{lof}{\contentsline {figure}{\numberline {5.7}{\ignorespaces T-Test and spearman correlation with according p-values after prompting llama3 and gemma2 with $P_{1}$ with and without a selection of embedded text chunks for passage retrieval}}{23}{figure.caption.12}\protected@file@percent }
 \@setckpt{experiments}{
-\setcounter{page}{21}
+\setcounter{page}{24}
 \setcounter{equation}{0}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
@@ -47,14 +62,14 @@
 \setcounter{subsubsection}{0}
 \setcounter{paragraph}{0}
 \setcounter{subparagraph}{0}
-\setcounter{figure}{3}
+\setcounter{figure}{7}
 \setcounter{table}{0}
 \setcounter{lstnumber}{1}
 \setcounter{section@level}{1}
 \setcounter{Item}{0}
 \setcounter{Hfootnote}{0}
 \setcounter{bookmark@seq@number}{0}
-\setcounter{caption@flags}{6}
+\setcounter{caption@flags}{2}
 \setcounter{continuedfloat}{0}
 \setcounter{subfigure}{0}
 \setcounter{subtable}{0}
@@ -67,7 +82,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{4}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/experiments.tex b/experiments.tex
index bc9da62ccf303bbac150407ae39f44e24ae39051..1ff88c28aa2e74f326357abef58e017f00b42077 100644
--- a/experiments.tex
+++ b/experiments.tex
@@ -1,201 +1,59 @@
 \chapter{Experiments}
 
-All my experiments have been conducted partially on my own computer but also on a remote server from the LT group at the University of Hamburg. This was mainly due to accessing a better GPU like the RTX A1000 NVIDEA for prompting LLMS and creating embeddings.
+All my experiments have been conducted partially on my own computer but also over SSH on a remote server from the LT group at the University of Hamburg. This was mainly due to accessing a better GPU like the NVIDIA RTX A6000 for doing more computationally intensive work such as prompting large language models (LLMs) and generating embeddings.
 
 
 \section{Base Experiment}
 
-For my first experiment, I formulated four prompts with slightly different wordings to observe how varying prompts affect the outcomes of the LLM. For each prompt, I tested two versions: one with additional passages from the literature providing information about the character, and one without such information, requiring the model to rely solely on its training data. ``[INST]'' and ``[\textbackslash INST]'' mark the start and end of each query instruction. ``\{character\}'' and ``\{book\}'' will be replaced with the real character name and book title. ``\{passages\}'' marks the spot where a collection of retrieved passages from the book for the given character that might help the Llama model with its characterizations will be passed into the prompt.\\
-
+For my first experiment, I formulated four prompts with slightly different wordings to observe how varying prompts affect the outcomes of the LLM. For each prompt, I tested two versions: one with additional passages from the literature providing information about the character, and one without such information, requiring the model to rely solely on its training data. All eight raw prompts \ref{fig:prompts2} contain tags, which are interpreted as follows. ``[INST]'' and ``[/INST]'' mark the start and end of each query instruction. ``\{character\}'' and ``\{book\}'' will be replaced with the real character name and book title. ``\{passages\}'' marks the spot where a collection of retrieved passages from the book for the given character that should help the Llama model with its characterizations will be passed into the prompt.\\
+
+
+\begin{figure}
+    \begin{center}
+        \begin{tabular}{|c|m{10cm}|}
+            \hline
+            Prompt      & Instruction                                                                                                                                                 \\ [0.5ex]
+            \hline\hline
+            $P_{1}^{z}$ & "[INST]Write a summary about the character \{character\} in the book \{book\}.[/INST]"                                                                      \\
+            \hline
+            $P_{1}^{r}$ & "[INST]Write a summary about the character \{character\} in the given text passages: \textbackslash n \{passages\}[/INST]"                                  \\
+            \hline\hline
+            $P_{2}^{z}$ & "[INST]Write a summary in the style of a fandom article about the character \{character\} in the book \{book\}.[/INST]"                                     \\
+            \hline
+            $P_{2}^{r}$ & "[INST]Write a summary in the style of a fandom article about the character \{character\} in the given text passages: \textbackslash n \{passages\}[/INST]" \\
+            \hline\hline
+            $P_{3}^{z}$ & "[INST]Provide a concise overview of the character \{character\} from the book \{book\}.[/INST]"                                                            \\
+            \hline
+            $P_{3}^{r}$ & "[INST]Provide a concise overview of the character \{character\} based on the following excerpts: \textbackslash n \{passages\}[/INST]"                     \\
+            \hline\hline
+            $P_{4}^{z}$ & "[INST]rite sumary bout thee cara cter \{character\} of th book \{book\}.[/INST]"                                                                           \\
+            \hline
+            $P_{4}^{r}$ & "[INST]rite sumary bout thee cara cter \{character\} bsed th fllowing excerpts: \textbackslash n \{passages\}[/INST]"                                       \\ [1ex]
+            \hline
+        \end{tabular}
+    \end{center}
+    \label{fig:prompts2}
+\end{figure}
 
 
-\begin{center}
-    \begin{tabular}{|c|m{10cm}|}
-        \hline
-        Prompt        & Instruction                                                                                                                                             \\ [0.5ex]
-        \hline\hline
-        $\delta_{1}$  & "[INST]Write a summary about the character \{character\} in the book \{book\}.[/INST]"                                                                  \\
-        \hline
-        $\delta_{1}'$ & "[INST]Write a summary about the character \{character\} in the given text passages: \textbackslash n \{help\}[/INST]"                                  \\
-        \hline\hline
-        $\delta_{2}$  & "[INST]Write a summary in the style of a fandom article about the character \{character\} in the book \{book\}.[/INST]"                                 \\
-        \hline
-        $\delta_{2}'$ & "[INST]Write a summary in the style of a fandom article about the character \{character\} in the given text passages: \textbackslash n \{help\}[/INST]" \\
-        \hline\hline
-        $\delta_{3}$  & "[INST]Provide a concise overview of the character \{character\} from the book \{book\}.[/INST]"                                                        \\
-        \hline
-        $\delta_{3}'$ & "[INST]Provide a concise overview of the character \{character\} based on the following excerpts: \textbackslash n \{help\}[/INST]"                     \\
-        \hline\hline
-        $\delta_{4}$  & "[INST]rite sumary bout thee cara cter \{character\} of th book \{book\}.[/INST]"                                                                       \\
-        \hline
-        $\delta_{4}'$ & "[INST]rite sumary bout thee cara cter \{character\} bsed th fllowing excerpts: \textbackslash n \{help\}[/INST]"                                       \\ [1ex]
-        \hline
-    \end{tabular}
-\end{center}
 
 
 
-As you can see, $\delta_{2}$ is more specific, requesting the style of a fandom article, whereas $\delta_{3}$ is less precise, asking only for an overview without specifying a particular format. The last prompt is similar to $\delta_{1}$ but is intentionally faulty by missing characters.These different prompts are used to determine the overall effects of various prompt wordings and faulty instructions on the language model.
+As you can see, $P_{2}$ is more specific, requesting the style of a fandom article, whereas $P_{3}$ is less precise, asking only for an overview without specifying a particular format. The last prompt $P_{4}$ is similar to $P_{1}$ but is intentionally faulty by missing characters. These different prompts are used to determine the overall effects of various prompt wordings and faulty instructions on the language model.
 
 
 
 In this first experiment, I selected additional information from the book by filtering for every sentence in which the character's name occurred at least once. Since the number of tokens might exceed the maximum input size of the LLaMA model, I removed every $n$-th sentence, where $n$ is calculated in such a way that the query size fits perfectly.
-Additionally, because characters are more likely to be introduced in the first sentences where they appear in the book, I added an additional cutoff $\alpha$. This cutoff represents the percentage of relevant sentences (with character name occurrences) to which every sentence with name occurence will be taken, so the rule of taking every $n$-th sentence only affects sentences after the cutoff. Overall the passage retrieval for this experiment $R_{base}$ works as follows. Let $S = \{s_i \mid 1 \leq i \leq k \}$ be the set of size $k$ which contains all relevant sentences containig the character and $l$ be the maximum inputsize of the Llama query. We first definde a function $S_{t}(a, b) = \{ s_{ti} \mid a \cdot k \leq ti \leq b \cdot k \}$, that enables a range selection of sentences with a lower and upper limit and a parameter $t$ for the stepsize. If we now choose our $n$ the right way 
-\[n = \begin{cases} 
-    \left\lfloor \frac{k - \alpha k}{l}\right\rfloor & \text{if } k - \alpha k > l\\
-    1 & \text{otherwise}
-\end{cases} \] we can write $R_{base}$ as \[R_{base} = S_{1}(0, \alpha) \cup S_{n} ( \alpha, 1) \].
-I fed the prompts through the mixtral 7b model with quantized weights.
-Quantization is a method used to decrease the computational and memory demands of running inference by using low-precision data types, such as 8-bit integers (int8), instead of the standard 32-bit floating-point (float32). Using fewer bits reduces the memory storage needed for the model, theoretically lowers energy consumption, and speeds up operations like matrix multiplication through integer arithmetic. This technique also enables models to run on embedded devices, which may only support integer data types. They are different types and levels of quantization and i started with the smallest $Q2_K$ (https://huggingface.co/ikawrakow/mixtral-instruct-8x7b-quantized-gguf) weights and therfor quickest responses. For the evaluation, I used BLEUScore and BERTScore to compare the results from the prompts against manually written articles from fandom.com.\\
-
-% \begin{figure}
-% \centering
-% \begin{tikzpicture}
-% \begin{axis}[
-%     xlabel={F1},
-%     ylabel={h},
-%     legend pos=north west,
-%     grid=both,
-%     width=10cm, % Adjust width as needed
-%     height=8cm % Adjust height as needed
-% ]
-
-% \addplot+[mark=none] table [x expr=\coordindex, y=F1, col sep=comma] {ressources/data/results.csv};
-
-
-% \legend{P, R, F1, h}
-
-% \legend{Data}
-% \end{axis}
-% \end{tikzpicture}
-% \caption{Plot of F1 vs h}
-% \label{fig:f1_vs_h}
-% \end{figure}
-
-% \begin{figure}
-% \centering
-% \begin{tikzpicture}
-% \begin{axis}[
-%     xlabel={F1},
-%     ylabel={h},
-%     legend pos=north west,
-%     grid=both,
-%     width=10cm, % Adjust width as needed
-%     height=8cm % Adjust height as needed
-% ]
-
-
-% \addplot+[mark=none] table [x expr=\coordindex, y=wF1, col sep=comma] {ressources/data/results.csv};
-
-% \legend{P, R, F1, h}
-
-% \legend{Data}
-% \end{axis}
-% \end{tikzpicture}
-% \caption{Plot of F1 vs h}
-% \label{fig:f1_vs_h}
-% \end{figure}
-
-% \subsection{Results}
-% \begin{figure}[H]
-%     \centering
-%     \begin{tikzpicture}
-%         \begin{axis}[
-%                 xlabel={F1},
-%                 ylabel={h},
-%                 legend pos=north west,
-%                 grid=both,
-%                 width=10cm, % Adjust width as needed
-%                 height=8cm % Adjust height as needed
-%             ]
-
-
-%             \addplot+[mark=none] table [x expr=\coordindex, y=h, col sep=comma] {ressources/data/results.csv};
-
-%             \legend{P, R, F1, h}
-
-%             \legend{Data}
-%         \end{axis}
-%     \end{tikzpicture}
-%     \caption{Plot of F1 vs h}
-%     \label{fig:f1_vs_h}
-% \end{figure}
-
-\subsection{Results}
-% \begin{figure}[H]
-%     \centering
-%     \makebox[\textwidth][c]{
-%         \begin{minipage}{1.1\textwidth}
-%             \begin{subfigure}[b]{0.45\textwidth}
-%                 \centering
-%                 \begin{tikzpicture}
-%                     \begin{axis}[
-%                             x tick label style={
-%                                     /pgf/number format/1000 sep=},
-%                             xlabel=BLEUScore,
-%                             ylabel=Amount,
-%                             enlargelimits=0.05,
-%                             legend style={at={(1.0,-0.2)},
-%                                     anchor=north,legend columns=-1},
-%                             ybar interval=0.7,
-%                         ]
-%                         \addplot
-%                         coordinates {(0.0,1) (1.4,288) (2.8,50) (4.3,15) (5.7,6) (7.2,1) (8.6,1) (11.5,1) };
-%                         \addplot
-%                         coordinates {(0.0,0) (1.4,266) (2.8,57) (4.3,24) (5.7,8) (7.2,7) (8.6,2) (11.5,0) };
-
-%                         \legend{$\delta'$,$\delta$}
-%                     \end{axis}
-%                 \end{tikzpicture}
-%                 \label{fig:enter-label}
-%             \end{subfigure}
-%             \hfill
-%             \begin{subfigure}[b]{0.45\textwidth}
-%                 \centering
-%                 \begin{tikzpicture}
-%                     \begin{axis}[
-%                             x tick label style={
-%                                     /pgf/number format/1000 sep=},
-%                             xlabel=BERTScore,
-%                             enlargelimits=0.05,
-%                             legend style={at={(1.0,-0.2)},
-%                                     anchor=north,legend columns=-1},
-%                             ybar interval=0.7,
-%                         ]
-%                         \addplot
-%                         coordinates {(0.38,0) (0.43,4) (0.47,46) (0.52,157) (0.56,119)
-%                                 (0.61,31) (0.65,0) };
-%                         \addplot
-%                         coordinates {(0.38,0) (0.43,0) (0.47,11) (0.52,90) (0.56,197)
-%                                 (0.61,60) (0.65,0) };
-
-%                         \legend{$\delta'$,$\delta$}
-%                     \end{axis}
-%                 \end{tikzpicture}
-%                 \label{fig:enter-label}
-%             \end{subfigure}
-%         \end{minipage}
-%     }
-%     \caption{Distribution of BERTScore and Blue-Metric without ($\delta$) and with passage retrieval ($\delta'$) in bins}
-% \end{figure}
-
-
-
-
-
-
-
-
-
-
-
-
-
-% ScatterPLOTS
-
-
+Additionally, because characters are more likely to be introduced in the first sentences where they appear in the book, I added an additional cutoff $\alpha$. This cutoff represents the percentage of relevant sentences (with character name occurrences) to which every sentence with name occurrence will be taken, so the rule of taking every $n$-th sentence only affects sentences after the cutoff. Overall the passage retrieval for this experiment $R_{base}$ works as follows. Let $S = \{s_i \mid 1 \leq i \leq k \}$ be the set of size $k$ which contains all relevant sentences containing the character and $l$ be the maximum input size of the Llama query. We first define a function $S_{t}(a, b) = \{ s_{ti} \mid a \cdot k \leq ti \leq b \cdot k \}$, that enables a range selection of sentences with a lower and upper limit and a parameter $t$ for the step size. If we now choose our $n$ the right way
+\[n = \begin{cases}
+        \left\lfloor \frac{k - \alpha k}{l}\right\rfloor & \text{if } k - \alpha k > l \\
+        1                                                & \text{otherwise}
+    \end{cases} \] we can write $R_{base}$ as \[R_{base} = S_{1}(0, \alpha) \cup S_{n} ( \alpha, 1) \].
+I utilized the Mixtral 8x7B model~\cite{mixtral7b} with quantized weights to process the prompts. Quantization is a technique designed to reduce the computational and memory demands of running inference by using low-precision data types, such as 8-bit integers (int8), instead of the standard 32-bit floating-point (float32). This approach decreases memory storage requirements, theoretically lowers energy consumption, and accelerates operations like matrix multiplication through integer arithmetic. Moreover, it enables models to operate on embedded devices that may only support integer data types. I started with the smallest quantization level, $Q2_K$ weights, to achieve the quickest responses. For evaluation, I used BLEUScore and BERTScore to compare the generated results against manually written articles from fandom.com.\\
 
+For the analysis of the results, I decided to use boxplots, t-tests, and Spearman correlation. To quickly summarize, a paired t-test compares the means of two related groups to determine if there is a statistically significant difference between these means. I used them to obtain quantitative evidence of whether the results improved after passage retrieval. A boxplot segregates the data into four parts by determining the three quartiles. Additionally, with the added histograms, it allows a better overview of the distribution of the data than just looking at the mean or median. The Spearman correlation evaluates monotonic relationships between two ranked variables, which helps to identify the general tendency of the passage retrieval.
 
+\subsection{Results}
 \begin{figure}[H]
     \centering
     \makebox[\textwidth][c]{
@@ -204,22 +62,67 @@ Quantization is a method used to decrease the computational and memory demands o
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
                             title={Prompt 1},
-                            xlabel={$BLEU(\delta)$},
-                            ylabel={$BLEU(\delta')$},
+                            title style={at={(0.2,1.2)}, anchor=north},
+                            xlabel={$BERT(P_{1}^{z})$},
+                            ylabel={$BERT(P_{1}^{r})$},
                             xmin=0.35, xmax=0.65,
                             ymin=0.35, ymax=0.65,
-                            xtick distance=0.1, ytick distance=0.1
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
                         \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=F1, y=wF1] {ressources/data/results.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results.csv};
                     \end{axis}
                 \end{tikzpicture}
                 \label{fig:prompt1}
@@ -228,21 +131,67 @@ Quantization is a method used to decrease the computational and memory demands o
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
                             title={Prompt 2},
-                            xlabel={$BLEU(\delta)$},
+                            title style={at={(0.2,1.2)}, anchor=north},
+                            xlabel={$BERT(P_{2}^{z})$},
+                            % ylabel={$BLEU(P')$},
                             xmin=0.35, xmax=0.65,
                             ymin=0.35, ymax=0.65,
-                            xtick distance=0.1, ytick distance=0.1
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
                         \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=F1, y=wF1] {ressources/data/results2.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results2.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results2.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results2.csv};
                     \end{axis}
                 \end{tikzpicture}
                 \label{fig:prompt2}
@@ -251,22 +200,67 @@ Quantization is a method used to decrease the computational and memory demands o
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
                             title={Prompt 3},
-                            xlabel={$BLEU(\delta)$},
-                            ylabel={$BLEU(\delta')$},
+                            title style={at={(0.2,1.2)}, anchor=north},
+                            xlabel={$BERT(P_{3}^{z})$},
+                            ylabel={$BERT(P_{3}^{r})$},
                             xmin=0.35, xmax=0.65,
                             ymin=0.35, ymax=0.65,
-                            xtick distance=0.1, ytick distance=0.1
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=F1, y=wF1] {ressources/data/results3.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results3.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results3.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results3.csv};
                     \end{axis}
                 \end{tikzpicture}
                 \label{fig:prompt3}
@@ -275,140 +269,365 @@ Quantization is a method used to decrease the computational and memory demands o
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
                             title={Prompt 4},
-                            xlabel={$BLEU(\delta)$},
+                            title style={at={(0.2,1.2)}, anchor=north},
+                            xlabel={$BERT(P_{4}^{z})$},
+                            % ylabel={$BLEU(P')$},
                             xmin=0.35, xmax=0.65,
                             ymin=0.35, ymax=0.65,
-                            xtick distance=0.1, ytick distance=0.1
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=F1, y=wF1] {ressources/data/results4.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results4.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results4.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/results4.csv};
                     \end{axis}
                 \end{tikzpicture}
                 \label{fig:prompt4}
             \end{subfigure}
         \end{minipage}
     }
-    \caption{Blue-Metric of Zero-Shot Characterizations generated with Llama without ($\delta$) and with passage retrieval ($\delta'$) from the literature}
+    \caption{BLEU metric of Zero-Shot Characterizations generated with Llama without ($P$) and with passage retrieval ($P'$) from the literature}
 \end{figure}
 
 \begin{figure}[H]
     \centering
     \makebox[\textwidth][c]{
         \begin{minipage}{1.1\textwidth}
+            \centering
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
                             title={Prompt 1},
-                            xlabel={$BERT(\delta)$},
-                            ylabel={$BERT(\delta')$},
+                            xlabel={$BERT(P_{1}^{z})$},
+                            ylabel={$BERT(P^{r})$},
                             xmin=0, xmax=16,
-                            ymin=0, ymax=16
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=h, y=wh]
-                            {ressources/data/results.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results.csv};
                     \end{axis}
                 \end{tikzpicture}
-                % \caption{Blue-Metric of Characterizations generated with Llama prompt 1 with and without additional textpassages from the literatur}
-                \label{fig:enter-label}
+                \label{fig:prompt1}
             \end{subfigure}
             \hfill
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
-                            title={Prompt 2},
-                            xlabel={$BERT(\delta)$},
-                            % ylabel={prompt with additional textpassages},
+                            title={Prompt 2},
+                            xlabel={$BERT(P_{2}^{z})$},
+                            % ylabel={$BERT(P')$},
                             xmin=0, xmax=16,
-                            ymin=0, ymax=16
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=h, y=wh]
-                            {ressources/data/results.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results2.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh]{ressources/data/results2.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % Use ybar interval % Increase number of bins for better resolution
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results2.csv};
                     \end{axis}
                 \end{tikzpicture}
-                % \caption{Blue-Metric of Characterizations generated with Llama prompt 1 with and without additional textpassages from the literatur}
-                \label{fig:enter-label}
+                \label{fig:prompt2}
             \end{subfigure}
             \vspace{0.5cm}
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
-                            title={Prompt 3},
-                            xlabel={$BERT(\delta)$},
-                            ylabel={$BERT(\delta')$},
+                            title={Prompt 3},
+                            xlabel={$BERT(P_{3}^{z})$},
+                            ylabel={$BERT(P^{r})$},
                             xmin=0, xmax=16,
-                            ymin=0, ymax=16
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=h, y=wh]
-                            {ressources/data/results.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results3.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results3.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % Use ybar interval % Increase number of bins for better resolution
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results3.csv};
                     \end{axis}
                 \end{tikzpicture}
-                % \caption{BERTScore of Characterizations generated with Llama prompt 1 with and without additional textpassages from the literatur}
-                \label{fig:enter-label}
+                \label{fig:prompt3}
             \end{subfigure}
             \hfill
             \begin{subfigure}[b]{0.45\textwidth}
                 \centering
                 \begin{tikzpicture}
+                    % Main axis (scatter plot)
                     \begin{axis}[
                             enlargelimits=false,
-                            title={Prompt 4},
-                            xlabel={$BERT(\delta)$},
-                            % ylabel={prompt with additional textpassages},
+                            title={Prompt 4},
+                            xlabel={$BERT(P_{4}^{z})$},
+                            % ylabel={$BERT(P')$},
                             xmin=0, xmax=16,
-                            ymin=0, ymax=16
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
                         ]
-                        \addplot[color=black, thick, dotted, domain=0:16] {x};
+                        \addplot[color=black, thick, dotted] {x};
                         \addplot+[
                             color=blue,
                             only marks,
                             mark=o,
-                            mark size=1.0pt]
-                        table [col sep=comma, x=h, y=wh]
-                            {ressources/data/results.csv};
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results4.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results4.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % Use ybar interval % Increase number of bins for better resolution
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/results4.csv};
                     \end{axis}
                 \end{tikzpicture}
-                % \caption{BERTScore of Characterizations generated with Llama prompt 1 with and without additional textpassages from the literatur}
-                \label{fig:enter-label}
+                \label{fig:prompt4}
             \end{subfigure}
         \end{minipage}
     }
-    \caption{BERTScore of Zero-Shot Characterizations generated with Llama without ($\delta$) and with passage retrieval ($\delta'$) from the literatur}
+
+    \caption{BERTScore of Zero-Shot Characterizations generated with Llama without ($P$) and with passage retrieval ($P'$) from the literature}
 \end{figure}
 
 
 
+
 %Other
 
 
@@ -430,7 +649,7 @@ Quantization is a method used to decrease the computational and memory demands o
                     cycle list={{blue}},
                     xtick = {1,2,3,4,5,6,7,8},
                     xticklabel style = {align=center, font=\small},
-                    xticklabels = {$\delta_{1}$, $\delta_{1}'$, $\delta_{2}$, $\delta_{2}'$, $\delta_{3}$, $\delta_{3}'$, $\delta_{4}$, $\delta_{4}'$},
+                    xticklabels = {$P_{1}$, $P_{1}'$, $P_{2}$, $P_{2}'$, $P_{3}$, $P_{3}'$, $P_{4}$, $P_{4}'$},
                     xtick style = {draw=none},
                     ylabel = {$BLEU$}
                 ]
@@ -582,7 +801,7 @@ Quantization is a method used to decrease the computational and memory demands o
                 \nextgroupplot[
                     xtick = {1,2,3,4,5,6,7,8},
                     xticklabel style = {align=center, font=\small},
-                    xticklabels = {$\delta_{1}$, $\delta_{1}'$, $\delta_{2}$, $\delta_{2}'$, $\delta_{3}$, $\delta_{3}'$, $\delta_{4}$, $\delta_{4}'$},
+                    xticklabels = {$P_{1}$, $P_{1}'$, $P_{2}$, $P_{2}'$, $P_{3}$, $P_{3}'$, $P_{4}$, $P_{4}'$},
                     xtick style = {draw=none},
                     axis x line* = bottom,
                     axis y line = left,
@@ -701,23 +920,366 @@ Quantization is a method used to decrease the computational and memory demands o
         \label{fig:boxplot}
     \end{subfigure}
     \vspace{1cm}
-    \caption{Boxplots of BLEU- and BERTScores for every prompt ($\delta_{1}, \delta_{1}', \delta_{2},...,\delta_{4}'$)}
+    \caption{Boxplots of BLEU- and BERTScores for every prompt ($P_{1}, P_{1}', P_{2},...,P_{4}'$)}
+\end{figure}
+
+\begin{figure}[H]
+    \begin{center}
+        \begin{tabular}{|c|c||c|c|}
+            \hline
+            Prompt  & Heuristic & T-Test            & Spearman Correlation \\ [0.5ex]
+            \hline\hline
+            $P_{1}$ & BLEU      & -15.0 (6.65e-40)  & 0.62 (3.46e-40)      \\
+            $P_{1}$ & BERT      & -3.04 (2.56e-03)  & 0.91 (4.34e-142)     \\
+            $P_{2}$ & BLEU      & -4.2 (3.08e-05)   & 0.74 (1.23e-95)      \\
+            $P_{2}$ & BERT      & -2.58 (1.00e-02)  & 0.92 (5.12e-224)     \\
+            $P_{3}$ & BLEU      & -15.52 (4.30e-45) & 0.7 (1.76e-79)       \\
+            $P_{3}$ & BERT      & -2.06 (3.94e-02)  & 0.92 (2.51e-218)     \\
+            $P_{4}$ & BLEU      & -20.65 (4.30e-70) & 0.61 (6.66e-57)      \\
+            $P_{4}$ & BERT      & -4.0 (7.13e-05)   & 0.92 (2.14e-220)     \\
+            \hline\hline
+        \end{tabular}
+    \end{center}
+    \caption{T-Test and Spearman correlation with corresponding p-values after prompting Llama3 and Gemma2 with prompts $P_{1}$--$P_{4}$ with and without a selection of embedded text chunks for passage retrieval}
+    \label{fig:prompts2}
 \end{figure}
 
 
 
 \subsection{Analysis}
-Obviously the method of passage retrieval used for this experiment isn't ideal, as regularly eliminating sentences could omit important context, also at this stage, the process of fetching fandom articles wasn't complete, resulting in a dataset with some duplicates and missing characterizations. Despite these limitations, the data is still sufficient to show two important aspects of the data. First, the results with passage retrieval are at least as good as, or already slightly better than, those without. Second, the results vary only slightly across the four different prompts. Befor we investigate that further lets have an more detailed look at the results.\\
-
-As we can see, the BLEU scores of each prompt mostly improve after passage retrieval. Although the maximum values of $\delta_{1}$ and $\delta_{2}$ have decreased slightly in $\delta_{1}'$ and $\delta_{2}'$, the minimum values, Q1, and Q3 have significantly increased, as observed in the box plots. For both $\delta_{3}$ and $\delta_{4}$, every box plot quartile has improved.\\
+Taking a more detailed look at the results, we can see that the BLEU scores of each prompt mostly improve after passage retrieval. Although the maximum values of $P_{1}^{z}$ and $P_{2}^{z}$ have decreased slightly in $P_{1}^{r}$ and $P_{2}^{r}$, the minimum values, Q1, and Q3 have significantly increased, as observed in the box plots. For both $P_{3}^{z}$ and $P_{4}^{z}$, every box plot quartile has improved.\\
 
 For BERTScore, the improvement isn't quite as visible. In fact, the upper quartiles have a lower maximum after passage retrieval, but Q1-Q3 has improved slightly for every prompt. Consequently, the results are more compact. Some outliers close to the maximum in Q4 might score so high prior to passage retrieval due to Llama being trained on similar information to the fandom articles. Especially when generating summaries for main characters, Llama might already have a great knowledge base for that character, and relying solely on the additional passed sentences might therefore be hindering in generating a good characterization.\\
 
-In summary, semantically, the results have only improved slightly and the different wordings in the prompts definately have an influence on the results average and variance (ref figure).
+Obviously, the method of passage retrieval used for this experiment isn't ideal, as regularly eliminating sentences could omit important context. Moreover, at this stage the process of fetching fandom articles wasn't complete, resulting in a dataset with some duplicates and missing characterizations. Despite these limitations, the data is still sufficient to show two important aspects. First, the results with passage retrieval are at least as good as, or already slightly better than, those without. The similarity of the vocabulary has increased quite significantly, whereas the semantics seem to have improved only slightly.
 
+Second, the different wordings in the prompts definitely have an influence on the results' average and variance (Figure~\ref*{fig:boxplot}). Nevertheless, choosing the right prompt for this task isn't as simple as picking the results with the highest score average; a low variance is even more crucial, since it testifies to a higher precision and therefore marks a more accurate prompt for achieving the desired output. Based on this deduction and the observations of BERT- and BLEU-Score, I will continue the next experiment with prompt $P_{2}$, since it had the highest results in both metrics, and will improve on the method for passage retrieval.\\
 
 
 \newpage
 \section{Selected Embedded chunks}
 
-We will now continue with prompt 1 from the base experiment since it had the highest results in both metrics. We will now improve on the method for passage retrieval.
\ No newline at end of file
+We will now continue with $P_{1}$ from the base experiment. Instead of selecting $n$ sentences that merely contain the name, we first split each book into chunks of roughly 1000 characters and then use BERT to create embeddings. We then try to retrieve the chunks that best describe the character.
+
+
+
+\begin{figure}[H]
+    \centering
+    \makebox[\textwidth][c]{
+        \begin{minipage}{1.1\textwidth}
+            \centering
+            \begin{subfigure}[b]{0.45\textwidth}
+                \centering
+                \begin{tikzpicture}
+                    % Main axis (scatter plot)
+                    \begin{axis}[
+                            enlargelimits=false,
+                            title={(Llama3)},
+                            title style={at={(0.1,1.2)}, anchor=north},
+                            xlabel={$BLEU(P_{1}^{z})$},
+                            ylabel={$BLEU(P^{r})$},
+                            xmin=0.35, xmax=0.65,
+                            ymin=0.35, ymax=0.65,
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
+                        ]
+                        \addplot[color=black, thick, dotted] {x};
+                        \addplot+[
+                            color=blue,
+                            only marks,
+                            mark=o,
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution, % Use ybar interval
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % Use ybar interval
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval.csv};
+                    \end{axis}
+                \end{tikzpicture}
+                \label{fig:prompt1}
+            \end{subfigure}
+            \hfill
+            \begin{subfigure}[b]{0.45\textwidth}
+                \centering
+                \begin{tikzpicture}
+                    % Main axis (scatter plot)
+                    \begin{axis}[
+                            enlargelimits=false,
+                            title={(Gemma2)},
+                            title style={at={(0.1,1.2)}, anchor=north},
+                            xlabel={$BLEU(P_{1}^{z})$},
+                            % ylabel={$BLEU(P')$},
+                            xmin=0.35, xmax=0.65,
+                            ymin=0.35, ymax=0.65,
+                            xtick distance=0.1, ytick distance=0.1,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
+                        ]
+                        \addplot[color=black, thick, dotted] {x};
+                        \addplot+[
+                            color=blue,
+                            only marks,
+                            mark=o,
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval2.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0.35, xmax=0.65
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30},
+                            fill=gray!50
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval2.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0.35, ymax=0.65
+                        ]
+                        \addplot [
+                            hist={data min=0.35, data max=0.65, handler/.style={xbar interval}, bins=30}, % xbar interval: horizontal bars for the y-axis histogram
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=F1, y=wF1] {ressources/data/normal_eval2.csv};
+                    \end{axis}
+                \end{tikzpicture}
+                \label{fig:prompt2}
+            \end{subfigure}
+        \end{minipage}
+    }
+    \caption{BLEU metric of Zero-Shot Characterizations generated with Llama3 and Gemma2 without ($P$) and with passage retrieval ($P'$) from the literature}
+\end{figure}
+
+\begin{figure}[H]
+    \centering
+    \makebox[\textwidth][c]{
+        \begin{minipage}{1.1\textwidth}
+            \centering
+            \begin{subfigure}[b]{0.45\textwidth}
+                \centering
+                \begin{tikzpicture}
+                    % Main axis (scatter plot)
+                    \begin{axis}[
+                            enlargelimits=false,
+                            title={(Llama3)},
+                            xlabel={$BERT(P_{1}^{z})$},
+                            ylabel={$BERT(P^{r})$},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
+                        ]
+                        \addplot[color=black, thick, dotted] {x};
+                        \addplot+[
+                            color=blue,
+                            only marks,
+                            mark=o,
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/normal_eval.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30},
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/normal_eval.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % xbar interval: horizontal bars for the y-axis histogram
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/normal_eval.csv};
+                    \end{axis}
+                \end{tikzpicture}
+                \label{fig:bert-prompt1}% renamed: fig:prompt1 is already defined in the preceding figure
+            \end{subfigure}
+            \hfill
+            \begin{subfigure}[b]{0.45\textwidth}
+                \centering
+                \begin{tikzpicture}
+                    % Main axis (scatter plot)
+                    \begin{axis}[
+                            enlargelimits=false,
+                            title={(Gemma2)},
+                            xlabel={$BERT(P_{1}^{z})$},
+                            % ylabel={$BERT(P')$},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=16,
+                            name=main axis,
+                            width=\textwidth,
+                            height=\textwidth
+                        ]
+                        \addplot[color=black, thick, dotted] {x};
+                        \addplot+[
+                            color=blue,
+                            only marks,
+                            mark=o,
+                            mark size=1.0pt
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/normal_eval2.csv};
+                    \end{axis}
+
+                    % Histogram for the x axis
+                    \begin{axis}[
+                            anchor=south west,
+                            at={(main axis.north west)},
+                            height=3cm,
+                            width=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=bottom,
+                            axis y line*=left,
+                            axis line style={draw=none},
+                            xmin=0, xmax=16,
+                            ymin=0, ymax=400
+                        ]
+                        \addplot [
+                            hist={data=x, bins=30}, % Increase number of bins for better resolution
+                            fill=gray!50
+                        ] table [col sep=comma, x=h, y=wh]{ressources/data/normal_eval2.csv};
+                    \end{axis}
+
+                    % Histogram for the y axis
+                    \begin{axis}[
+                            anchor=north west,
+                            at={(main axis.north east)},
+                            width=3cm,
+                            height=\textwidth,
+                            xtick=\empty,
+                            ytick=\empty,
+                            axis x line*=top,
+                            axis y line*=right,
+                            axis line style={draw=none},
+                            ymin=0, ymax=16,
+                            xmin=0, xmax=400
+                        ]
+                        \addplot [
+                            hist={data min=0, data max=16, handler/.style={xbar interval}, bins=30}, % xbar interval: horizontal bars for the y-axis histogram
+                            fill=gray!50,
+                            x filter/.code=\pgfmathparse{rawy}, % Interpret the x values of the histogram as y values 
+                            y filter/.code=\pgfmathparse{rawx} % And vice versa
+                        ] table [col sep=comma, x=h, y=wh] {ressources/data/normal_eval2.csv};
+                    \end{axis}
+                \end{tikzpicture}
+                \label{fig:bert-prompt2}% renamed: fig:prompt2 is already defined in the preceding figure
+            \end{subfigure}
+        \end{minipage}
+    }
+
+    \caption{BERTScore of Zero-Shot Characterizations generated with Llama3 and Gemma2 without ($P$) and with passage retrieval ($P'$) from the literature}
+\end{figure}
+
+
+
+
+\begin{figure}[H]
+    \begin{center}
+        \begin{tabular}{|c|c||c|c|}% column spec matched to the 4 cells per row (was 6)
+            \hline
+            Model  & Heuristic & T-Test            & Spearman Correlation \\ [0.5ex]
+            \hline\hline
+            Llama3 & BLEU      & -10.31 (6.99e-23) & 0.55 (2.40e-43)      \\
+            \hline
+            Llama3 & BERT      & -7.42 (4.52e-13)  & 0.77 (1.03e-106)     \\
+            \hline
+            Gemma2 & BLEU      & -14.29 (1.64e-39) & 0.53 (7.09e-40)      \\
+            \hline
+            Gemma2 & BERT      & -3.69 (2.43e-4)   & 0.56 (6.30e-46)      \\
+            \hline
+        \end{tabular}
+    \end{center}
+    \caption{T-test and Spearman correlation with corresponding p-values after prompting Llama3 and Gemma2 with $P_{1}$ with and without a selection of embedded text chunks for passage retrieval}
+    \label{fig:prompts2}% label must follow \caption to reference the float's counter
+\end{figure}
+
+
diff --git a/masterthesis.aux b/masterthesis.aux
index 9a51daf60bb18cbd34f36d14389f0da645cc6acd..77cd8e8aa3d2527464762fca7e807b4e2de8a830 100644
--- a/masterthesis.aux
+++ b/masterthesis.aux
@@ -14,56 +14,52 @@
 \BKM@entry{id=1,dest={636861707465722E31},srcline={8}}{5C3337365C3337375C303030495C3030306E5C303030745C303030725C3030306F5C303030645C303030755C303030635C303030745C303030695C3030306F5C3030306E}
 \@input{methodology.aux}
 \BKM@entry{id=2,dest={636861707465722E32},srcline={1}}{5C3337365C3337375C3030304D5C303030655C303030745C303030685C3030306F5C303030645C3030306F5C3030306C5C3030306F5C303030675C30303079}
-\BKM@entry{id=3,dest={73656374696F6E2E322E31},srcline={5}}{5C3337365C3337375C303030455C3030306D5C303030625C303030655C303030645C303030645C303030695C3030306E5C303030675C303030735C3030302F5C303030545C3030306F5C3030306B5C303030655C3030306E}
-\BKM@entry{id=4,dest={73656374696F6E2E322E32},srcline={9}}{5C3337365C3337375C303030545C303030685C303030655C3030305C3034305C303030545C303030725C303030615C3030306E5C303030735C303030665C3030306F5C303030725C3030306D5C303030655C30303072}
-\BKM@entry{id=5,dest={73756273656374696F6E2E322E322E31},srcline={17}}{5C3337365C3337375C303030455C3030306E5C303030635C3030306F5C303030645C303030655C30303072}
-\BKM@entry{id=6,dest={73756273656374696F6E2E322E322E32},srcline={26}}{5C3337365C3337375C303030445C303030655C303030635C3030306F5C303030645C303030655C30303072}
-\BKM@entry{id=7,dest={73656374696F6E2E322E33},srcline={38}}{5C3337365C3337375C303030425C303030455C303030525C30303054}
-\BKM@entry{id=8,dest={73756273656374696F6E2E322E332E31},srcline={55}}{5C3337365C3337375C303030455C3030306D5C303030625C303030655C303030645C303030645C303030695C3030306E5C303030675C30303073}
-\BKM@entry{id=9,dest={73756273656374696F6E2E322E332E32},srcline={67}}{5C3337365C3337375C303030425C303030455C303030525C303030545C303030535C303030635C3030306F5C303030725C30303065}
-\BKM@entry{id=10,dest={73656374696F6E2E322E34},srcline={88}}{5C3337365C3337375C3030304C5C3030304C5C3030304D5C30303073}
-\BKM@entry{id=11,dest={73656374696F6E2E322E35},srcline={89}}{5C3337365C3337375C303030425C3030304C5C303030455C303030555C3030302D5C303030535C303030635C3030306F5C303030725C30303065}
-\BKM@entry{id=12,dest={73656374696F6E2E322E36},srcline={93}}{5C3337365C3337375C303030515C303030755C303030655C303030725C303030795C3030305C3034305C303030675C303030655C3030306E5C303030655C303030725C303030615C303030745C303030695C3030306F5C3030306E}
+\BKM@entry{id=3,dest={73656374696F6E2E322E31},srcline={2}}{5C3337365C3337375C303030525C303030655C3030306C5C303030615C303030745C303030655C303030645C3030305C3034305C303030575C3030306F5C303030725C3030306B}
+\abx@aux@cite{0}{10.1007/3-540-36127-8_20}
+\abx@aux@segm{0}{0}{10.1007/3-540-36127-8_20}
+\abx@aux@cite{0}{brahman-etal-2021-characters-tell}
+\abx@aux@segm{0}{0}{brahman-etal-2021-characters-tell}
+\abx@aux@cite{0}{schroder-etal-2021-neural}
+\abx@aux@segm{0}{0}{schroder-etal-2021-neural}
+\BKM@entry{id=4,dest={73756273656374696F6E2E322E312E31},srcline={59}}{5C3337365C3337375C303030505C303030725C3030306F5C3030306A5C303030655C303030635C303030745C3030305C3034305C303030475C303030755C303030745C303030655C3030306E5C303030625C303030655C303030725C30303067}
+\BKM@entry{id=5,dest={73656374696F6E2E322E32},srcline={62}}{5C3337365C3337375C303030525C303030415C30303047}
+\BKM@entry{id=6,dest={73656374696F6E2E322E33},srcline={67}}{5C3337365C3337375C303030515C303030755C303030655C303030725C303030795C3030305C3034305C303030675C303030655C3030306E5C303030655C303030725C303030615C303030745C303030695C3030306F5C3030306E}
 \abx@aux@cite{0}{schroder-etal-2021-neural}
 \abx@aux@segm{0}{0}{schroder-etal-2021-neural}
 \abx@aux@cite{0}{dobrovolskii-2021-word}
 \abx@aux@segm{0}{0}{dobrovolskii-2021-word}
-\BKM@entry{id=13,dest={73656374696F6E2E322E37},srcline={121}}{5C3337365C3337375C303030775C303030655C303030695C303030675C303030685C303030745C303030735C3030305C3034305C303030715C303030755C303030615C3030306E5C303030745C303030695C3030307A5C303030655C303030645C3030305C3034305C303030755C3030306E5C303030695C303030735C303030655C303030725C303030765C303030655C303030335C303030725C3030305C3034305C3030306C5C3030306C5C303030615C3030306D5C303030615C3030302E5C303030635C303030705C30303070}
 \@input{related_work.aux}
-\BKM@entry{id=14,dest={636861707465722E33},srcline={1}}{5C3337365C3337375C303030525C303030655C3030306C5C303030615C303030745C303030655C303030645C3030305C3034305C303030575C3030306F5C303030725C3030306B}
-\abx@aux@cite{0}{10.1007/3-540-36127-8_20}
-\abx@aux@segm{0}{0}{10.1007/3-540-36127-8_20}
-\abx@aux@cite{0}{brahman-etal-2021-characters-tell}
-\abx@aux@segm{0}{0}{brahman-etal-2021-characters-tell}
+\BKM@entry{id=7,dest={636861707465722E33},srcline={1}}{5C3337365C3337375C303030525C303030655C3030306C5C303030615C303030745C303030655C303030645C3030305C3034305C303030575C3030306F5C303030725C3030306B}
+\BKM@entry{id=8,dest={73656374696F6E2E332E31},srcline={3}}{5C3337365C3337375C303030545C3030306F5C3030306B5C303030655C3030306E5C303030695C3030307A5C303030615C303030745C303030695C3030306F5C3030306E}
+\BKM@entry{id=9,dest={73656374696F6E2E332E32},srcline={13}}{5C3337365C3337375C303030545C303030685C303030655C3030305C3034305C303030545C303030725C303030615C3030306E5C303030735C303030665C3030306F5C303030725C3030306D5C303030655C30303072}
+\BKM@entry{id=10,dest={73756273656374696F6E2E332E322E31},srcline={25}}{5C3337365C3337375C303030455C3030306E5C303030635C3030306F5C303030645C303030655C30303072}
+\BKM@entry{id=11,dest={73756273656374696F6E2E332E322E32},srcline={42}}{5C3337365C3337375C303030445C303030655C303030635C3030306F5C303030645C303030655C30303072}
+\BKM@entry{id=12,dest={73656374696F6E2E332E33},srcline={54}}{5C3337365C3337375C303030425C303030455C303030525C30303054}
+\BKM@entry{id=13,dest={73756273656374696F6E2E332E332E31},srcline={57}}{5C3337365C3337375C303030455C3030306D5C303030625C303030655C303030645C303030645C303030695C3030306E5C303030675C30303073}
+\BKM@entry{id=14,dest={73756273656374696F6E2E332E332E32},srcline={78}}{5C3337365C3337375C303030465C303030695C3030306E5C303030655C3030302D5C303030545C303030755C3030306E5C303030695C3030306E5C30303067}
+\BKM@entry{id=15,dest={73756273656374696F6E2E332E332E33},srcline={84}}{5C3337365C3337375C303030425C303030455C303030525C303030545C303030535C303030635C3030306F5C303030725C30303065}
+\BKM@entry{id=16,dest={73656374696F6E2E332E34},srcline={110}}{5C3337365C3337375C303030425C3030304C5C303030455C303030555C3030302D5C303030535C303030635C3030306F5C303030725C30303065}
 \@input{dataset.aux}
-\BKM@entry{id=15,dest={636861707465722E34},srcline={2}}{5C3337365C3337375C303030475C303030615C303030745C303030685C303030655C303030725C303030695C3030306E5C303030675C3030305C3034305C3030306F5C303030665C3030305C3034305C3030306C5C303030695C303030745C303030655C303030725C303030615C303030745C303030755C303030725C30303065}
+\BKM@entry{id=17,dest={636861707465722E34},srcline={2}}{5C3337365C3337375C303030475C303030615C303030745C303030685C303030655C303030725C303030695C3030306E5C303030675C3030305C3034305C3030306F5C303030665C3030305C3034305C3030306C5C303030695C303030745C303030655C303030725C303030615C303030745C303030755C303030725C30303065}
 \@input{experiments.aux}
-\BKM@entry{id=16,dest={636861707465722E35},srcline={1}}{5C3337365C3337375C303030455C303030785C303030705C303030655C303030725C303030695C3030306D5C303030655C3030306E5C303030745C30303073}
-\BKM@entry{id=17,dest={73656374696F6E2E352E31},srcline={6}}{5C3337365C3337375C303030425C303030615C303030735C303030655C3030305C3034305C303030455C303030785C303030705C303030655C303030725C303030695C3030306D5C303030655C3030306E5C30303074}
-\BKM@entry{id=18,dest={73756273656374696F6E2E352E312E31},srcline={124}}{5C3337365C3337375C303030525C303030655C303030735C303030755C3030306C5C303030745C30303073}
-\BKM@entry{id=19,dest={73756273656374696F6E2E352E312E32},srcline={709}}{5C3337365C3337375C303030415C3030306E5C303030615C3030306C5C303030795C303030735C303030695C30303073}
-\BKM@entry{id=20,dest={73656374696F6E2E352E32},srcline={721}}{5C3337365C3337375C303030535C303030655C3030306C5C303030655C303030635C303030745C303030655C303030645C3030305C3034305C303030455C3030306D5C303030625C303030655C303030645C303030645C303030655C303030645C3030305C3034305C303030635C303030685C303030755C3030306E5C3030306B5C30303073}
+\BKM@entry{id=18,dest={636861707465722E35},srcline={1}}{5C3337365C3337375C303030455C303030785C303030705C303030655C303030725C303030695C3030306D5C303030655C3030306E5C303030745C30303073}
+\BKM@entry{id=19,dest={73656374696F6E2E352E31},srcline={6}}{5C3337365C3337375C303030425C303030615C303030735C303030655C3030305C3034305C303030455C303030785C303030705C303030655C303030725C303030695C3030306D5C303030655C3030306E5C30303074}
+\BKM@entry{id=20,dest={73756273656374696F6E2E352E312E31},srcline={56}}{5C3337365C3337375C303030525C303030655C303030735C303030755C3030306C5C303030745C30303073}
+\BKM@entry{id=21,dest={73756273656374696F6E2E352E312E32},srcline={949}}{5C3337365C3337375C303030415C3030306E5C303030615C3030306C5C303030795C303030735C303030695C30303073}
+\BKM@entry{id=22,dest={73656374696F6E2E352E32},srcline={960}}{5C3337365C3337375C303030535C303030655C3030306C5C303030655C303030635C303030745C303030655C303030645C3030305C3034305C303030455C3030306D5C303030625C303030655C303030645C303030645C303030655C303030645C3030305C3034305C303030635C303030685C303030755C3030306E5C3030306B5C30303073}
 \@input{conclusion.aux}
-\BKM@entry{id=21,dest={636861707465722E36},srcline={1}}{5C3337365C3337375C303030435C3030306F5C3030306E5C303030635C3030306C5C303030755C303030735C303030695C3030306F5C3030306E}
-\BKM@entry{id=22,dest={73656374696F6E2E362E31},srcline={3}}{5C3337365C3337375C303030735C303030755C3030306D5C3030306D5C303030615C303030725C30303079}
-\BKM@entry{id=23,dest={73656374696F6E2E362E32},srcline={4}}{5C3337365C3337375C303030665C303030755C303030745C303030755C303030725C303030655C3030305C3034305C303030775C3030306F5C303030725C3030306B}
-\BKM@entry{id=24,dest={73656374696F6E2E362E33},srcline={5}}{5C3337365C3337375C3030306E5C3030306F5C303030745C303030655C3030305C3034305C3030306F5C303030665C3030305C3034305C303030745C303030685C303030615C3030306E5C3030306B5C30303073}
-\BKM@entry{id=25,dest={636861707465722A2E37},srcline={110}}{5C3337365C3337375C3030304C5C303030695C303030745C303030655C303030725C303030615C303030745C303030755C30303072}
-\@writefile{toc}{\contentsline {chapter}{\nonumberline Literatur}{23}{chapter*.7}\protected@file@percent }
-\@writefile{lof}{\addvspace {10\p@ }}
-\@writefile{lot}{\addvspace {10\p@ }}
-\@writefile{lol}{\addvspace {10\p@ }}
+\BKM@entry{id=23,dest={636861707465722E36},srcline={1}}{5C3337365C3337375C303030435C3030306F5C3030306E5C303030635C3030306C5C303030755C303030735C303030695C3030306F5C3030306E}
+\BKM@entry{id=24,dest={73656374696F6E2E362E31},srcline={3}}{5C3337365C3337375C303030735C303030755C3030306D5C3030306D5C303030615C303030725C30303079}
+\BKM@entry{id=25,dest={73656374696F6E2E362E32},srcline={4}}{5C3337365C3337375C303030665C303030755C303030745C303030755C303030725C303030655C3030305C3034305C303030775C3030306F5C303030725C3030306B}
+\BKM@entry{id=26,dest={73656374696F6E2E362E33},srcline={12}}{5C3337365C3337375C3030306E5C3030306F5C303030745C303030655C3030305C3034305C3030306F5C303030665C3030305C3034305C303030745C303030685C303030615C3030306E5C3030306B5C30303073}
 \@input{appendix.aux}
-\BKM@entry{id=26,dest={636861707465722E37},srcline={1}}{5C3337365C3337375C303030415C303030705C303030705C303030655C3030306E5C303030645C303030695C30303078}
-\BKM@entry{id=27,dest={636861707465722A2E38},srcline={3}}{5C3337365C3337375C303030455C303030695C303030645C303030655C303030735C303030735C303030745C303030615C303030745C303030745C3030306C5C303030695C303030635C303030685C303030655C3030305C3034305C303030565C303030655C303030725C303030735C303030695C303030635C303030685C303030655C303030725C303030755C3030306E5C30303067}
-\@writefile{toc}{\contentsline {chapter}{Eidesstattliche Versicherung}{27}{chapter*.8}\protected@file@percent }
-\abx@aux@read@bbl@mdfivesum{3530656EE3C99722B668C16F3E6CB6BE}
-\abx@aux@defaultrefcontext{0}{brahman-etal-2021-characters-tell}{anyt/global//global/global/global}
-\abx@aux@defaultrefcontext{0}{dobrovolskii-2021-word}{anyt/global//global/global/global}
-\abx@aux@defaultrefcontext{0}{10.1007/3-540-36127-8_20}{anyt/global//global/global/global}
-\abx@aux@defaultrefcontext{0}{schroder-etal-2021-neural}{anyt/global//global/global/global}
+\BKM@entry{id=27,dest={636861707465722E37},srcline={1}}{5C3337365C3337375C303030415C303030705C303030705C303030655C3030306E5C303030645C303030695C30303078}
+\BKM@entry{id=28,dest={636861707465722A2E3133},srcline={3}}{5C3337365C3337375C303030455C303030695C303030645C303030655C303030735C303030735C303030745C303030615C303030745C303030745C3030306C5C303030695C303030635C303030685C303030655C3030305C3034305C303030565C303030655C303030725C303030735C303030695C303030635C303030685C303030655C303030725C303030755C3030306E5C30303067}
+\@writefile{toc}{\contentsline {chapter}{Eidesstattliche Versicherung}{29}{chapter*.13}\protected@file@percent }
+\abx@aux@read@bbl@mdfivesum{D41D8CD98F00B204E9800998ECF8427E}
+\abx@aux@read@bblrerun
 \global\@namedef{scr@dte@chapter@lastmaxnumwidth}{9.85492pt}
 \global\@namedef{scr@dte@section@lastmaxnumwidth}{18.0674pt}
 \global\@namedef{scr@dte@subsection@lastmaxnumwidth}{26.27988pt}
 \@writefile{toc}{\providecommand\tocbasic@end@toc@file{}\tocbasic@end@toc@file}
-\gdef \@abspage@last{31}
+\gdef \@abspage@last{33}
diff --git a/masterthesis.bbl b/masterthesis.bbl
index fc64f39134e95318caa15c7861294cca1c8ff96b..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/masterthesis.bbl
+++ b/masterthesis.bbl
@@ -1,333 +0,0 @@
-% $ biblatex auxiliary file $
-% $ biblatex bbl format version 3.3 $
-% Do not modify the above lines!
-%
-% This is an auxiliary file used by the 'biblatex' package.
-% This file may safely be deleted. It will be recreated by
-% biber as required.
-%
-\begingroup
-\makeatletter
-\@ifundefined{ver@biblatex.sty}
-  {\@latex@error
-     {Missing 'biblatex' package}
-     {The bibliography requires the 'biblatex' package.}
-      \aftergroup\endinput}
-  {}
-\endgroup
-
-
-\refsection{0}
-  \datalist[entry]{anyt/global//global/global/global}
-    \entry{brahman-etal-2021-characters-tell}{inproceedings}{}{}
-      \name{author}{6}{}{%
-        {{hash=2b4a38ef233e401589f3114bca784bea}{%
-           family={Brahman},
-           familyi={B\bibinitperiod},
-           given={Faeze},
-           giveni={F\bibinitperiod}}}%
-        {{hash=b702b0ce5b0a9c671b2639e09e0af1a6}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Meng},
-           giveni={M\bibinitperiod}}}%
-        {{hash=21dbac8bb4ba1ed555216a1d80595c20}{%
-           family={Tafjord},
-           familyi={T\bibinitperiod},
-           given={Oyvind},
-           giveni={O\bibinitperiod}}}%
-        {{hash=b9749d38cefa5213b5a281a3f3c9bf6d}{%
-           family={Zhao},
-           familyi={Z\bibinitperiod},
-           given={Chao},
-           giveni={C\bibinitperiod}}}%
-        {{hash=642ef2dffcf1d8c1905bcf343127c6d7}{%
-           family={Sachan},
-           familyi={S\bibinitperiod},
-           given={Mrinmaya},
-           giveni={M\bibinitperiod}}}%
-        {{hash=59f387defbbf8029d33da69f047c56e2}{%
-           family={Chaturvedi},
-           familyi={C\bibinitperiod},
-           given={Snigdha},
-           giveni={S\bibinitperiod}}}%
-      }
-      \name{editor}{4}{}{%
-        {{hash=04fe502dceb461055032ff714831f9fd}{%
-           family={Moens},
-           familyi={M\bibinitperiod},
-           given={Marie-Francine},
-           giveni={M\bibinithyphendelim F\bibinitperiod}}}%
-        {{hash=ed0f3403ef6238bb67876d4c6ad27464}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Xuanjing},
-           giveni={X\bibinitperiod}}}%
-        {{hash=ede296c12d7a570bc8b70fcb82116a5f}{%
-           family={Specia},
-           familyi={S\bibinitperiod},
-           given={Lucia},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c657f71658dc4a8dc6a0e6d0ec40fb6c}{%
-           family={Yih},
-           familyi={Y\bibinitperiod},
-           given={Scott\bibnamedelima Wen-tau},
-           giveni={S\bibinitperiod\bibinitdelim W\bibinithyphendelim t\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Punta Cana, Dominican Republic}%
-      }
-      \list{publisher}{1}{%
-        {Association for Computational Linguistics}%
-      }
-      \strng{namehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{fullhash}{a83c21161c8a6768f1046478d7624c56}
-      \strng{fullhashraw}{a83c21161c8a6768f1046478d7624c56}
-      \strng{bibnamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authorbibnamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authornamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authorfullhash}{a83c21161c8a6768f1046478d7624c56}
-      \strng{authorfullhashraw}{a83c21161c8a6768f1046478d7624c56}
-      \strng{editorbibnamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editornamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editorfullhash}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \strng{editorfullhashraw}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \field{labelalpha}{Bra+21}
-      \field{sortinit}{B}
-      \field{sortinithash}{d7095fff47cda75ca2589920aae98399}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{When reading a literary piece, readers often make inferences about various characters{'} roles, personalities, relationships, intents, actions, etc. While humans can readily draw upon their past experiences to build such a character-centric view of the narrative, understanding characters in narratives can be a challenging task for machines. To encourage research in this field of character-centric narrative understanding, we present LiSCU {--} a new dataset of literary pieces and their summaries paired with descriptions of characters that appear in them. We also introduce two new tasks on LiSCU: Character Identification and Character Description Generation. Our experiments with several pre-trained language models adapted for these tasks demonstrate that there is a need for better models of narrative comprehension.}
-      \field{booktitle}{Findings of the Association for Computational Linguistics: EMNLP 2021}
-      \field{month}{11}
-      \field{title}{{``}Let Your Characters Tell Their Story{''}: A Dataset for Character-Centric Narrative Understanding}
-      \field{year}{2021}
-      \field{pages}{1734\bibrangedash 1752}
-      \range{pages}{19}
-      \verb{doi}
-      \verb 10.18653/v1/2021.findings-emnlp.150
-      \endverb
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.findings-emnlp.150
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.findings-emnlp.150
-      \endverb
-    \endentry
-    \entry{dobrovolskii-2021-word}{inproceedings}{}{}
-      \name{author}{1}{}{%
-        {{hash=46e885cada67f54d91688f3a3461f995}{%
-           family={Dobrovolskii},
-           familyi={D\bibinitperiod},
-           given={Vladimir},
-           giveni={V\bibinitperiod}}}%
-      }
-      \name{editor}{4}{}{%
-        {{hash=04fe502dceb461055032ff714831f9fd}{%
-           family={Moens},
-           familyi={M\bibinitperiod},
-           given={Marie-Francine},
-           giveni={M\bibinithyphendelim F\bibinitperiod}}}%
-        {{hash=ed0f3403ef6238bb67876d4c6ad27464}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Xuanjing},
-           giveni={X\bibinitperiod}}}%
-        {{hash=ede296c12d7a570bc8b70fcb82116a5f}{%
-           family={Specia},
-           familyi={S\bibinitperiod},
-           given={Lucia},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c657f71658dc4a8dc6a0e6d0ec40fb6c}{%
-           family={Yih},
-           familyi={Y\bibinitperiod},
-           given={Scott\bibnamedelima Wen-tau},
-           giveni={S\bibinitperiod\bibinitdelim W\bibinithyphendelim t\bibinitperiod}}}%
-      }
-      \list{location}{2}{%
-        {Online}%
-        {Punta Cana, Dominican Republic}%
-      }
-      \list{publisher}{1}{%
-        {Association for Computational Linguistics}%
-      }
-      \strng{namehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{fullhash}{46e885cada67f54d91688f3a3461f995}
-      \strng{fullhashraw}{46e885cada67f54d91688f3a3461f995}
-      \strng{bibnamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorbibnamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authornamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorfullhash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorfullhashraw}{46e885cada67f54d91688f3a3461f995}
-      \strng{editorbibnamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editornamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editorfullhash}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \strng{editorfullhashraw}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \field{labelalpha}{Dob21}
-      \field{sortinit}{D}
-      \field{sortinithash}{6f385f66841fb5e82009dc833c761848}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{Recent coreference resolution models rely heavily on span representations to find coreference links between word spans. As the number of spans is $O(n^2)$ in the length of text and the number of potential links is $O(n^4)$, various pruning techniques are necessary to make this approach computationally feasible. We propose instead to consider coreference links between individual words rather than word spans and then reconstruct the word spans. This reduces the complexity of the coreference model to $O(n^2)$ and allows it to consider all potential mentions without pruning any of them out. We also demonstrate that, with these changes, SpanBERT for coreference resolution will be significantly outperformed by RoBERTa. While being highly efficient, our model performs competitively with recent coreference resolution systems on the OntoNotes benchmark.}
-      \field{booktitle}{Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing}
-      \field{month}{11}
-      \field{title}{Word-Level Coreference Resolution}
-      \field{year}{2021}
-      \field{pages}{7670\bibrangedash 7675}
-      \range{pages}{6}
-      \verb{doi}
-      \verb 10.18653/v1/2021.emnlp-main.605
-      \endverb
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.emnlp-main.605
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.emnlp-main.605
-      \endverb
-    \endentry
-    \entry{10.1007/3-540-36127-8_20}{inproceedings}{}{}
-      \name{author}{3}{}{%
-        {{hash=36eec28b5721d76054629bfe3f1b3c7e}{%
-           family={Neto},
-           familyi={N\bibinitperiod},
-           given={Joel\bibnamedelima Larocca},
-           giveni={J\bibinitperiod\bibinitdelim L\bibinitperiod}}}%
-        {{hash=82469e2f61871ac5e3be89a40311d954}{%
-           family={Freitas},
-           familyi={F\bibinitperiod},
-           given={Alex\bibnamedelima A.},
-           giveni={A\bibinitperiod\bibinitdelim A\bibinitperiod}}}%
-        {{hash=43302d2ad2f59f864d90d8387340c50a}{%
-           family={Kaestner},
-           familyi={K\bibinitperiod},
-           given={Celso\bibnamedelimb A.\bibnamedelimi A.},
-           giveni={C\bibinitperiod\bibinitdelim A\bibinitperiod\bibinitdelim A\bibinitperiod}}}%
-      }
-      \name{editor}{2}{}{%
-        {{hash=a5ca1591a62720d0441b73b29676d25e}{%
-           family={Bittencourt},
-           familyi={B\bibinitperiod},
-           given={Guilherme},
-           giveni={G\bibinitperiod}}}%
-        {{hash=e672502e0249d1ae76482d00faf67a2c}{%
-           family={Ramalho},
-           familyi={R\bibinitperiod},
-           given={Geber\bibnamedelima L.},
-           giveni={G\bibinitperiod\bibinitdelim L\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Berlin, Heidelberg}%
-      }
-      \list{publisher}{1}{%
-        {Springer Berlin Heidelberg}%
-      }
-      \strng{namehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{fullhash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{fullhashraw}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{bibnamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorbibnamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authornamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorfullhash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorfullhashraw}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{editorbibnamehash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editornamehash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editorfullhash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editorfullhashraw}{b471e7c819086fc2cd45f2802d06c347}
-      \field{labelalpha}{NFK02}
-      \field{sortinit}{N}
-      \field{sortinithash}{22369a73d5f88983a108b63f07f37084}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{In this paper we address the automatic summarization task. Recent research works on extractive-summary generation employ some heuristics, but few works indicate how to select the relevant features. We will present a summarization procedure based on the application of trainable Machine Learning algorithms which employs a set of features extracted directly from the original text. These features are of two kinds: statistical - based on the frequency of some elements in the text; and linguistic - extracted from a simplified argumentative structure of the text. We also present some computational results obtained with the application of our summarizer to some well known text databases, and we compare these results to some baseline summarization procedures.}
-      \field{booktitle}{Advances in Artificial Intelligence}
-      \field{isbn}{978-3-540-36127-5}
-      \field{title}{Automatic Text Summarization Using a Machine Learning Approach}
-      \field{year}{2002}
-      \field{pages}{205\bibrangedash 215}
-      \range{pages}{11}
-    \endentry
-    \entry{schroder-etal-2021-neural}{inproceedings}{}{}
-      \name{author}{3}{}{%
-        {{hash=61e1a35cd8f127581604c7176238ecea}{%
-           family={Schröder},
-           familyi={S\bibinitperiod},
-           given={Fynn},
-           giveni={F\bibinitperiod}}}%
-        {{hash=314b84e0117f7efc39b85ff749187916}{%
-           family={Hatzel},
-           familyi={H\bibinitperiod},
-           given={Hans\bibnamedelima Ole},
-           giveni={H\bibinitperiod\bibinitdelim O\bibinitperiod}}}%
-        {{hash=786eca9f4966307b17cae6c3bba98905}{%
-           family={Biemann},
-           familyi={B\bibinitperiod},
-           given={Chris},
-           giveni={C\bibinitperiod}}}%
-      }
-      \name{editor}{5}{}{%
-        {{hash=a45b4d1927bb0ea3a040dfe5d1f0d310}{%
-           family={Evang},
-           familyi={E\bibinitperiod},
-           given={Kilian},
-           giveni={K\bibinitperiod}}}%
-        {{hash=ac4826768da90eb0ef40ee37ef17293c}{%
-           family={Kallmeyer},
-           familyi={K\bibinitperiod},
-           given={Laura},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c2ee7d465388b287ae04672af1876a07}{%
-           family={Osswald},
-           familyi={O\bibinitperiod},
-           given={Rainer},
-           giveni={R\bibinitperiod}}}%
-        {{hash=b5700d65a3f7e881039505b79ba0d8f6}{%
-           family={Waszczuk},
-           familyi={W\bibinitperiod},
-           given={Jakub},
-           giveni={J\bibinitperiod}}}%
-        {{hash=d2bddcda907ea93d3a82a0623d8d0930}{%
-           family={Zesch},
-           familyi={Z\bibinitperiod},
-           given={Torsten},
-           giveni={T\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Düsseldorf, Germany}%
-      }
-      \list{publisher}{1}{%
-        {KONVENS 2021 Organizers}%
-      }
-      \strng{namehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{fullhash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{fullhashraw}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{bibnamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorbibnamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authornamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorfullhash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorfullhashraw}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{editorbibnamehash}{360afad7bdb9fdd8b0f6c012331c8c04}
-      \strng{editornamehash}{360afad7bdb9fdd8b0f6c012331c8c04}
-      \strng{editorfullhash}{e27538100860ec465c704e9604c84c85}
-      \strng{editorfullhashraw}{e27538100860ec465c704e9604c84c85}
-      \field{labelalpha}{SHB21}
-      \field{sortinit}{S}
-      \field{sortinithash}{b164b07b29984b41daf1e85279fbc5ab}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{booktitle}{Proceedings of the 17th Conference on Natural Language Processing (KONVENS 2021)}
-      \field{month}{6--9 9}
-      \field{title}{Neural End-to-end Coreference Resolution for {G}erman in Different Domains}
-      \field{year}{2021}
-      \field{pages}{170\bibrangedash 181}
-      \range{pages}{12}
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.konvens-1.15
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.konvens-1.15
-      \endverb
-    \endentry
-  \enddatalist
-\endrefsection
-\endinput
-
diff --git a/masterthesis.bbl-SAVE-ERROR b/masterthesis.bbl-SAVE-ERROR
index fc64f39134e95318caa15c7861294cca1c8ff96b..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 100644
--- a/masterthesis.bbl-SAVE-ERROR
+++ b/masterthesis.bbl-SAVE-ERROR
@@ -1,333 +0,0 @@
-% $ biblatex auxiliary file $
-% $ biblatex bbl format version 3.3 $
-% Do not modify the above lines!
-%
-% This is an auxiliary file used by the 'biblatex' package.
-% This file may safely be deleted. It will be recreated by
-% biber as required.
-%
-\begingroup
-\makeatletter
-\@ifundefined{ver@biblatex.sty}
-  {\@latex@error
-     {Missing 'biblatex' package}
-     {The bibliography requires the 'biblatex' package.}
-      \aftergroup\endinput}
-  {}
-\endgroup
-
-
-\refsection{0}
-  \datalist[entry]{anyt/global//global/global/global}
-    \entry{brahman-etal-2021-characters-tell}{inproceedings}{}{}
-      \name{author}{6}{}{%
-        {{hash=2b4a38ef233e401589f3114bca784bea}{%
-           family={Brahman},
-           familyi={B\bibinitperiod},
-           given={Faeze},
-           giveni={F\bibinitperiod}}}%
-        {{hash=b702b0ce5b0a9c671b2639e09e0af1a6}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Meng},
-           giveni={M\bibinitperiod}}}%
-        {{hash=21dbac8bb4ba1ed555216a1d80595c20}{%
-           family={Tafjord},
-           familyi={T\bibinitperiod},
-           given={Oyvind},
-           giveni={O\bibinitperiod}}}%
-        {{hash=b9749d38cefa5213b5a281a3f3c9bf6d}{%
-           family={Zhao},
-           familyi={Z\bibinitperiod},
-           given={Chao},
-           giveni={C\bibinitperiod}}}%
-        {{hash=642ef2dffcf1d8c1905bcf343127c6d7}{%
-           family={Sachan},
-           familyi={S\bibinitperiod},
-           given={Mrinmaya},
-           giveni={M\bibinitperiod}}}%
-        {{hash=59f387defbbf8029d33da69f047c56e2}{%
-           family={Chaturvedi},
-           familyi={C\bibinitperiod},
-           given={Snigdha},
-           giveni={S\bibinitperiod}}}%
-      }
-      \name{editor}{4}{}{%
-        {{hash=04fe502dceb461055032ff714831f9fd}{%
-           family={Moens},
-           familyi={M\bibinitperiod},
-           given={Marie-Francine},
-           giveni={M\bibinithyphendelim F\bibinitperiod}}}%
-        {{hash=ed0f3403ef6238bb67876d4c6ad27464}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Xuanjing},
-           giveni={X\bibinitperiod}}}%
-        {{hash=ede296c12d7a570bc8b70fcb82116a5f}{%
-           family={Specia},
-           familyi={S\bibinitperiod},
-           given={Lucia},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c657f71658dc4a8dc6a0e6d0ec40fb6c}{%
-           family={Yih},
-           familyi={Y\bibinitperiod},
-           given={Scott\bibnamedelima Wen-tau},
-           giveni={S\bibinitperiod\bibinitdelim W\bibinithyphendelim t\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Punta Cana, Dominican Republic}%
-      }
-      \list{publisher}{1}{%
-        {Association for Computational Linguistics}%
-      }
-      \strng{namehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{fullhash}{a83c21161c8a6768f1046478d7624c56}
-      \strng{fullhashraw}{a83c21161c8a6768f1046478d7624c56}
-      \strng{bibnamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authorbibnamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authornamehash}{1803656690f9ced5a4e5ccddd24d6ab8}
-      \strng{authorfullhash}{a83c21161c8a6768f1046478d7624c56}
-      \strng{authorfullhashraw}{a83c21161c8a6768f1046478d7624c56}
-      \strng{editorbibnamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editornamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editorfullhash}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \strng{editorfullhashraw}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \field{labelalpha}{Bra+21}
-      \field{sortinit}{B}
-      \field{sortinithash}{d7095fff47cda75ca2589920aae98399}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{When reading a literary piece, readers often make inferences about various characters{'} roles, personalities, relationships, intents, actions, etc. While humans can readily draw upon their past experiences to build such a character-centric view of the narrative, understanding characters in narratives can be a challenging task for machines. To encourage research in this field of character-centric narrative understanding, we present LiSCU {--} a new dataset of literary pieces and their summaries paired with descriptions of characters that appear in them. We also introduce two new tasks on LiSCU: Character Identification and Character Description Generation. Our experiments with several pre-trained language models adapted for these tasks demonstrate that there is a need for better models of narrative comprehension.}
-      \field{booktitle}{Findings of the Association for Computational Linguistics: EMNLP 2021}
-      \field{month}{11}
-      \field{title}{{``}Let Your Characters Tell Their Story{''}: A Dataset for Character-Centric Narrative Understanding}
-      \field{year}{2021}
-      \field{pages}{1734\bibrangedash 1752}
-      \range{pages}{19}
-      \verb{doi}
-      \verb 10.18653/v1/2021.findings-emnlp.150
-      \endverb
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.findings-emnlp.150
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.findings-emnlp.150
-      \endverb
-    \endentry
-    \entry{dobrovolskii-2021-word}{inproceedings}{}{}
-      \name{author}{1}{}{%
-        {{hash=46e885cada67f54d91688f3a3461f995}{%
-           family={Dobrovolskii},
-           familyi={D\bibinitperiod},
-           given={Vladimir},
-           giveni={V\bibinitperiod}}}%
-      }
-      \name{editor}{4}{}{%
-        {{hash=04fe502dceb461055032ff714831f9fd}{%
-           family={Moens},
-           familyi={M\bibinitperiod},
-           given={Marie-Francine},
-           giveni={M\bibinithyphendelim F\bibinitperiod}}}%
-        {{hash=ed0f3403ef6238bb67876d4c6ad27464}{%
-           family={Huang},
-           familyi={H\bibinitperiod},
-           given={Xuanjing},
-           giveni={X\bibinitperiod}}}%
-        {{hash=ede296c12d7a570bc8b70fcb82116a5f}{%
-           family={Specia},
-           familyi={S\bibinitperiod},
-           given={Lucia},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c657f71658dc4a8dc6a0e6d0ec40fb6c}{%
-           family={Yih},
-           familyi={Y\bibinitperiod},
-           given={Scott\bibnamedelima Wen-tau},
-           giveni={S\bibinitperiod\bibinitdelim W\bibinithyphendelim t\bibinitperiod}}}%
-      }
-      \list{location}{2}{%
-        {Online}%
-        {Punta Cana, Dominican Republic}%
-      }
-      \list{publisher}{1}{%
-        {Association for Computational Linguistics}%
-      }
-      \strng{namehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{fullhash}{46e885cada67f54d91688f3a3461f995}
-      \strng{fullhashraw}{46e885cada67f54d91688f3a3461f995}
-      \strng{bibnamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorbibnamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authornamehash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorfullhash}{46e885cada67f54d91688f3a3461f995}
-      \strng{authorfullhashraw}{46e885cada67f54d91688f3a3461f995}
-      \strng{editorbibnamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editornamehash}{37ee52e763dbd58c491e692f53cc91ca}
-      \strng{editorfullhash}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \strng{editorfullhashraw}{ec90ccf3b84cfe7183bf239f86d7779d}
-      \field{labelalpha}{Dob21}
-      \field{sortinit}{D}
-      \field{sortinithash}{6f385f66841fb5e82009dc833c761848}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{Recent coreference resolution models rely heavily on span representations to find coreference links between word spans. As the number of spans is $O(n^2)$ in the length of text and the number of potential links is $O(n^4)$, various pruning techniques are necessary to make this approach computationally feasible. We propose instead to consider coreference links between individual words rather than word spans and then reconstruct the word spans. This reduces the complexity of the coreference model to $O(n^2)$ and allows it to consider all potential mentions without pruning any of them out. We also demonstrate that, with these changes, SpanBERT for coreference resolution will be significantly outperformed by RoBERTa. While being highly efficient, our model performs competitively with recent coreference resolution systems on the OntoNotes benchmark.}
-      \field{booktitle}{Proceedings of the 2021 Conference on Empirical Methods in Natural Language Processing}
-      \field{month}{11}
-      \field{title}{Word-Level Coreference Resolution}
-      \field{year}{2021}
-      \field{pages}{7670\bibrangedash 7675}
-      \range{pages}{6}
-      \verb{doi}
-      \verb 10.18653/v1/2021.emnlp-main.605
-      \endverb
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.emnlp-main.605
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.emnlp-main.605
-      \endverb
-    \endentry
-    \entry{10.1007/3-540-36127-8_20}{inproceedings}{}{}
-      \name{author}{3}{}{%
-        {{hash=36eec28b5721d76054629bfe3f1b3c7e}{%
-           family={Neto},
-           familyi={N\bibinitperiod},
-           given={Joel\bibnamedelima Larocca},
-           giveni={J\bibinitperiod\bibinitdelim L\bibinitperiod}}}%
-        {{hash=82469e2f61871ac5e3be89a40311d954}{%
-           family={Freitas},
-           familyi={F\bibinitperiod},
-           given={Alex\bibnamedelima A.},
-           giveni={A\bibinitperiod\bibinitdelim A\bibinitperiod}}}%
-        {{hash=43302d2ad2f59f864d90d8387340c50a}{%
-           family={Kaestner},
-           familyi={K\bibinitperiod},
-           given={Celso\bibnamedelimb A.\bibnamedelimi A.},
-           giveni={C\bibinitperiod\bibinitdelim A\bibinitperiod\bibinitdelim A\bibinitperiod}}}%
-      }
-      \name{editor}{2}{}{%
-        {{hash=a5ca1591a62720d0441b73b29676d25e}{%
-           family={Bittencourt},
-           familyi={B\bibinitperiod},
-           given={Guilherme},
-           giveni={G\bibinitperiod}}}%
-        {{hash=e672502e0249d1ae76482d00faf67a2c}{%
-           family={Ramalho},
-           familyi={R\bibinitperiod},
-           given={Geber\bibnamedelima L.},
-           giveni={G\bibinitperiod\bibinitdelim L\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Berlin, Heidelberg}%
-      }
-      \list{publisher}{1}{%
-        {Springer Berlin Heidelberg}%
-      }
-      \strng{namehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{fullhash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{fullhashraw}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{bibnamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorbibnamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authornamehash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorfullhash}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{authorfullhashraw}{e40e65098efbb12c1c3c7a194b7d45d3}
-      \strng{editorbibnamehash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editornamehash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editorfullhash}{b471e7c819086fc2cd45f2802d06c347}
-      \strng{editorfullhashraw}{b471e7c819086fc2cd45f2802d06c347}
-      \field{labelalpha}{NFK02}
-      \field{sortinit}{N}
-      \field{sortinithash}{22369a73d5f88983a108b63f07f37084}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{abstract}{In this paper we address the automatic summarization task. Recent research works on extractive-summary generation employ some heuristics, but few works indicate how to select the relevant features. We will present a summarization procedure based on the application of trainable Machine Learning algorithms which employs a set of features extracted directly from the original text. These features are of two kinds: statistical - based on the frequency of some elements in the text; and linguistic - extracted from a simplified argumentative structure of the text. We also present some computational results obtained with the application of our summarizer to some well known text databases, and we compare these results to some baseline summarization procedures.}
-      \field{booktitle}{Advances in Artificial Intelligence}
-      \field{isbn}{978-3-540-36127-5}
-      \field{title}{Automatic Text Summarization Using a Machine Learning Approach}
-      \field{year}{2002}
-      \field{pages}{205\bibrangedash 215}
-      \range{pages}{11}
-    \endentry
-    \entry{schroder-etal-2021-neural}{inproceedings}{}{}
-      \name{author}{3}{}{%
-        {{hash=61e1a35cd8f127581604c7176238ecea}{%
-           family={Schröder},
-           familyi={S\bibinitperiod},
-           given={Fynn},
-           giveni={F\bibinitperiod}}}%
-        {{hash=314b84e0117f7efc39b85ff749187916}{%
-           family={Hatzel},
-           familyi={H\bibinitperiod},
-           given={Hans\bibnamedelima Ole},
-           giveni={H\bibinitperiod\bibinitdelim O\bibinitperiod}}}%
-        {{hash=786eca9f4966307b17cae6c3bba98905}{%
-           family={Biemann},
-           familyi={B\bibinitperiod},
-           given={Chris},
-           giveni={C\bibinitperiod}}}%
-      }
-      \name{editor}{5}{}{%
-        {{hash=a45b4d1927bb0ea3a040dfe5d1f0d310}{%
-           family={Evang},
-           familyi={E\bibinitperiod},
-           given={Kilian},
-           giveni={K\bibinitperiod}}}%
-        {{hash=ac4826768da90eb0ef40ee37ef17293c}{%
-           family={Kallmeyer},
-           familyi={K\bibinitperiod},
-           given={Laura},
-           giveni={L\bibinitperiod}}}%
-        {{hash=c2ee7d465388b287ae04672af1876a07}{%
-           family={Osswald},
-           familyi={O\bibinitperiod},
-           given={Rainer},
-           giveni={R\bibinitperiod}}}%
-        {{hash=b5700d65a3f7e881039505b79ba0d8f6}{%
-           family={Waszczuk},
-           familyi={W\bibinitperiod},
-           given={Jakub},
-           giveni={J\bibinitperiod}}}%
-        {{hash=d2bddcda907ea93d3a82a0623d8d0930}{%
-           family={Zesch},
-           familyi={Z\bibinitperiod},
-           given={Torsten},
-           giveni={T\bibinitperiod}}}%
-      }
-      \list{location}{1}{%
-        {Düsseldorf, Germany}%
-      }
-      \list{publisher}{1}{%
-        {KONVENS 2021 Organizers}%
-      }
-      \strng{namehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{fullhash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{fullhashraw}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{bibnamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorbibnamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authornamehash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorfullhash}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{authorfullhashraw}{2565bff841ce070cdf9a2979052ea4aa}
-      \strng{editorbibnamehash}{360afad7bdb9fdd8b0f6c012331c8c04}
-      \strng{editornamehash}{360afad7bdb9fdd8b0f6c012331c8c04}
-      \strng{editorfullhash}{e27538100860ec465c704e9604c84c85}
-      \strng{editorfullhashraw}{e27538100860ec465c704e9604c84c85}
-      \field{labelalpha}{SHB21}
-      \field{sortinit}{S}
-      \field{sortinithash}{b164b07b29984b41daf1e85279fbc5ab}
-      \field{labelnamesource}{author}
-      \field{labeltitlesource}{title}
-      \field{booktitle}{Proceedings of the 17th Conference on Natural Language Processing (KONVENS 2021)}
-      \field{month}{6--9 9}
-      \field{title}{Neural End-to-end Coreference Resolution for {G}erman in Different Domains}
-      \field{year}{2021}
-      \field{pages}{170\bibrangedash 181}
-      \range{pages}{12}
-      \verb{urlraw}
-      \verb https://aclanthology.org/2021.konvens-1.15
-      \endverb
-      \verb{url}
-      \verb https://aclanthology.org/2021.konvens-1.15
-      \endverb
-    \endentry
-  \enddatalist
-\endrefsection
-\endinput
-
diff --git a/masterthesis.bcf b/masterthesis.bcf
index 80aab066bb5deddfec14feb6ece8e6ea23a1e611..70361576d8a5b0d83581306fcb3541ba02c55ce5 100644
--- a/masterthesis.bcf
+++ b/masterthesis.bcf
@@ -2372,10 +2372,11 @@
     <bcf:datasource type="file" datatype="bibtex" glob="false">_bibliography.bib</bcf:datasource>
   </bcf:bibdata>
   <bcf:section number="0">
-    <bcf:citekey order="1" intorder="1">schroder-etal-2021-neural</bcf:citekey>
-    <bcf:citekey order="2" intorder="1">dobrovolskii-2021-word</bcf:citekey>
-    <bcf:citekey order="3" intorder="1">10.1007/3-540-36127-8_20</bcf:citekey>
-    <bcf:citekey order="4" intorder="1">brahman-etal-2021-characters-tell</bcf:citekey>
+    <bcf:citekey order="1" intorder="1">10.1007/3-540-36127-8_20</bcf:citekey>
+    <bcf:citekey order="2" intorder="1">brahman-etal-2021-characters-tell</bcf:citekey>
+    <bcf:citekey order="3" intorder="1">schroder-etal-2021-neural</bcf:citekey>
+    <bcf:citekey order="4" intorder="1">schroder-etal-2021-neural</bcf:citekey>
+    <bcf:citekey order="5" intorder="1">dobrovolskii-2021-word</bcf:citekey>
   </bcf:section>
   <!-- SORTING TEMPLATES -->
   <bcf:sortingtemplate name="anyt">
diff --git a/masterthesis.fdb_latexmk b/masterthesis.fdb_latexmk
index 89bc30231bc76cb3329bbb47a16e5ca2fec8389f..eba24e384f3dc6e710a04731e6b58c560a1317eb 100644
--- a/masterthesis.fdb_latexmk
+++ b/masterthesis.fdb_latexmk
@@ -1,23 +1,21 @@
 # Fdb version 4
-["biber masterthesis"] 1718729071.27769 "masterthesis.bcf" "masterthesis.bbl" "masterthesis" 1719008558.24882 0
-  "_bibliography.bib" 1716391331 6346 b914bbdad333d562f7ed4fca08e399d0 ""
-  "masterthesis.bcf" 1719008558 108917 e03a5bf58039b649aef246b5adda42f8 "pdflatex"
+["biber masterthesis"] 1721418358.72647 "masterthesis.bcf" "masterthesis.bbl" "masterthesis" 1721466440.60445 0
+  "_bibliography.bib" 1721417266 6548 3a148e74c262e18ef3c412eb022e9a7e ""
+  "masterthesis.bcf" 1721466440 108997 bcf99afa8345e0ee0af30f1761279c5e "pdflatex"
   (generated)
   "masterthesis.bbl"
   "masterthesis.blg"
   (rewritten before read)
-["pdflatex"] 1719008553.74917 "c:/Users/danie/Desktop/Masterarbbeit NLP/masterthesis.tex" "masterthesis.pdf" "masterthesis" 1719008558.24929 0
+["pdflatex"] 1721466433.16013 "c:/Users/danie/Desktop/Masterarbbeit NLP/masterthesis.tex" "masterthesis.pdf" "masterthesis" 1721466440.6049 0
   "C:/Users/danie/AppData/Local/MiKTeX/fonts/map/pdftex/pdftex.map" 1718623652 82507 a9eef8696c92627b87e9975edea03b2a ""
   "C:/Users/danie/AppData/Local/MiKTeX/miktex/data/le/pdftex/pdflatex.fmt" 1714948752 24236005 173a77b29316c5b8c9fa5d48235cc505 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/enc/dvips/base/8r.enc" 1458473887 4993 80dc9bab7f31fb78a000ccfed0e27cab ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/courier/pcrr8r.tfm" 1136765053 1292 bd42be2f344128bff6d35d98474adfe3 ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/courier/pcrr8t.tfm" 1136765053 1384 4632f5e54900a7dadbb83f555bc61e56 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplb8r.tfm" 1136768654 2532 9ad73cf4dd2173a847f2a5f5608e0b9a ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplb8t.tfm" 1136768654 3456 16dd534f88eb2bd21ebc7203786b436e ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplr7t.tfm" 1136768654 1804 7b5f73028f6509167f47ace9d69509ed ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplr8c.tfm" 1136768654 1348 0e8eb69b4437626c0f6c9bed4a4e373c ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplr8r.tfm" 1136768654 2796 d37c29814a6717720ee1a7c9a0b2c3b8 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplr8t.tfm" 1136768654 3820 ee5b9d58608ae328e43c6e2bfd4ff851 ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplrc8t.tfm" 1136768654 6396 8f2d6400481d3b17b3e6693d0adfe2c9 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplri8r.tfm" 1136768654 2720 70000d5e623e601132eab3cded5b819b ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplri8t.tfm" 1136768654 3684 929c666381f7272e81481908b735ccba ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/adobe/palatino/pplro8r.tfm" 1136768654 2948 325b50af1ea2702f0a5da8c6a9d8c345 ""
@@ -25,13 +23,16 @@
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/jknappen/ec/ecrm1095.tfm" 993062124 3172 eb6efaa83c6c52473a1c403f376eab0f ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/amsfonts/symbols/msam10.tfm" 1233951854 916 f87d7c45f9c908e672703b83b72241a3 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/amsfonts/symbols/msbm10.tfm" 1233951854 908 2921f8a10601f252058503cc6570e581 ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/cm/cmbx10.tfm" 1136765053 1328 c834bbb027764024c09d3d2bf908b5f0 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/cm/cmex10.tfm" 1136765053 992 662f679a0b3d2d53c1b94050fdaa3f50 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/cm/cmmi10.tfm" 1136765053 1528 abec98dbc43e172678c11b3b9031252a ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/cm/cmr10.tfm" 1136765053 1296 45809c5a464d5f32c8f98ba97c1bb47f ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/cm/cmsy10.tfm" 1136765053 1124 6c73e740cf17375f03eec0ee63599741 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/latex-fonts/lasy6.tfm" 1136765053 520 4889cce2180234b97cad636b6039c722 ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/fplmb.tfm" 1136768654 984 3dcde7360ef3d34302e591b511cba83c ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/fplmr.tfm" 1136768654 1032 7fa31d93ecd4cbdfac02c7a1ebc6facc ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/fplmri.tfm" 1136768654 1148 56b0fdf7ca3f800294e968f64cfdcb87 ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/zplmb7t.tfm" 1136768654 1640 c14e51df62d6cb0dd79e4ca3b91ca7c3 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/zplmr7m.tfm" 1136768654 2080 40a71f65088bdd047622ce295520749e ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/zplmr7t.tfm" 1136768654 1828 bb8add3bd19426549f9267c88b0cb8bd ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/tfm/public/mathpazo/zplmr7v.tfm" 1136768654 1012 955c4ca523d7827d33db91a33412b048 ""
@@ -42,17 +43,16 @@
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmsy10.pfb" 1247596667 32569 5e5ddc8df908dea60932f3c484a54c0d ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmr.pfb" 1136849748 9356 3e1f7eacbb57d05d0b682da8d83eb600 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmri.pfb" 1136849748 15213 d999bbe8bd9c8f231f3f66a737c76478 ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/courier/ucrr8a.pfb" 1136846148 45758 19968a0990191524e34e1994d4a31cb6 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplb8a.pfb" 1136849748 52406 dad2f72863a03727d5f536c64a69c452 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplr8a.pfb" 1136849748 52665 661b1e6b26edb5f50dd491f8a701cb57 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplri8a.pfb" 1136849748 50022 90249cba7e3e4e9845f80328d6f9bd13 ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/courier/pcrr8t.vf" 1136765053 2184 8475af1b9cfa983db5f46f5ed4b8f9f7 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplb8t.vf" 1136768654 2316 c3899bd8afb459a9a5a9a546bf3029a2 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplr7t.vf" 1136768654 1380 037725697594405988999f61172f52de ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplr8c.vf" 1136768654 3504 f65f653e1ad318a9d01b4af15300683a ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplr8t.vf" 1136768654 2324 a163806de13ddf37313d2ee968aa0a98 ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplrc8t.vf" 1136768654 3612 c9f70b808a41736c70babe50fdb9f79c ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplri8t.vf" 1136768654 2308 5bc0a90b83a3fd7d37d34ef3b64d7e8a ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/adobe/palatino/pplro8t.vf" 1136768654 2324 7474f923d9dd66ec0846639f92c9aec3 ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/public/mathpazo/zplmb7t.vf" 1136768654 1536 03ea9eccf9f5341331bed4d35eb9a3d8 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/public/mathpazo/zplmr7m.vf" 1136768654 1812 de7624743c8bafe2ce204bec2f72ce7e ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/public/mathpazo/zplmr7t.vf" 1136768654 1532 0ba0b316cfeb3df86c9701756c9a4b71 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/vf/public/mathpazo/zplmr7v.vf" 1136768654 1436 3cc3caf644d79936444909aaff6786a8 ""
@@ -278,8 +278,8 @@
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/omxzplm.fd" 1585253778 339 fb88cb3e5f25cf1596ba8826c4210e0e ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/ot1ppl.fd" 1585253778 1007 06b773644d960aac68add40fcb596208 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/ot1zplm.fd" 1585253778 458 5e7baa1a259bab50a2bdffad6426f38c ""
-  "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/t1pcr.fd" 1585253778 835 d5895e9edc628f2be019beb2c0ec66df ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/t1ppl.fd" 1585253778 817 5747d3f33ec2f7c4881c94e931a130b1 ""
+  "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/psnfss/ts1ppl.fd" 1585253778 653 35f1cf70bf09b6e2d22ee0902cfb1a8b ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/refcount/refcount.sty" 1576433952 9878 9e94e8fa600d95f9c7731bb21dfb67a4 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/rerunfilecheck/rerunfilecheck.sty" 1657800696 9714 ba3194bd52c8499b3f1e3eb91d409670 ""
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/setspace/setspace.sty" 1670403145 22490 8cac309b79a4c53a4ffce4b1b07aead0 ""
@@ -289,35 +289,38 @@
   "C:/Users/danie/AppData/Local/Programs/MiKTeX/tex/latex/xcolor/xcolor.sty" 1700127522 55487 80a65caedd3722f4c20a14a69e785d8f ""
   "_eidversicherung.tex" 1714902046 922 feed3b62108272e791d7f65fdc1d7c84 ""
   "_preamble.sty" 1714948947 2576 04bee0dc31ab20447aa0afe0a876cea2 ""
-  "_titlepage.aux" 1719008555 4054 5ae16f9b27d4dbdf1829efb7bed43b88 "pdflatex"
+  "_titlepage.aux" 1721466435 4054 5ae16f9b27d4dbdf1829efb7bed43b88 "pdflatex"
   "_titlepage.tex" 1716985422 1384 1962e7b62f97fe2185bbcd860e30e2b5 ""
-  "appendix.aux" 1719008557 4273 fcc5510ed43c8b9c5a068c1768061678 "pdflatex"
+  "appendix.aux" 1721466440 4273 a84baf7b7b10130c4b192b93c2b1d932 "pdflatex"
   "appendix.tex" 1714902046 153 96de2ec39c79368bf559da3902b44c49 ""
-  "c:/Users/danie/Desktop/Masterarbbeit NLP/masterthesis.tex" 1718918656 3388 1a3d3b050246f34b1da3790d15078c72 ""
-  "conclusion.aux" 1719008557 4615 4ded98a4a3c3a67bf9385588ceaa4c7a "pdflatex"
-  "conclusion.tex" 1717528359 335 9748e8b1241fc403232a54d74006565d ""
-  "dataset.aux" 1719008556 4620 41239dfac69f37589b36fea166d314e6 "pdflatex"
-  "dataset.tex" 1717528568 3652 939e7e589664966b254853117003c9a5 ""
-  "experiments.aux" 1719008557 7277 bc90aae8838c76d1ee3c945ca4e4f5cf "pdflatex"
-  "experiments.tex" 1719008553 32571 810c6a63ec03a3777784360400e90311 ""
-  "introduction.aux" 1719008555 4279 8c6f0e498f6d68235a071aa1caa56107 "pdflatex"
+  "c:/Users/danie/Desktop/Masterarbbeit NLP/masterthesis.tex" 1721420140 3388 1a3d3b050246f34b1da3790d15078c72 ""
+  "conclusion.aux" 1721466440 4615 9352ffd01f51b089dad80cd898472ab1 "pdflatex"
+  "conclusion.tex" 1721423671 1885 1189f8c04c4ad14ce77fc386be4f8283 ""
+  "dataset.aux" 1721466435 4620 052ed7650ac13ea9ae3bfe5157ab7e7a "pdflatex"
+  "dataset.tex" 1720560352 3887 f851a1f5f6bfa6afdd61b8c8762ae0aa ""
+  "experiments.aux" 1721466440 9495 c8a7082b8209ed2cecf7f160899b7fc0 "pdflatex"
+  "experiments.tex" 1721420054 63035 2b6583406831b2267c57b6afca03617b ""
+  "introduction.aux" 1721466435 4279 8c6f0e498f6d68235a071aa1caa56107 "pdflatex"
   "introduction.tex" 1717514965 2519 caaf188bdca17cf9dc5a9a75a0711e3c ""
-  "masterthesis.aux" 1719008557 7676 870334d01829bdb87bd03fea6c061925 "pdflatex"
-  "masterthesis.bbl" 1718729075 15919 597efc0038b12521d31156c1f1e6bc00 "biber masterthesis"
-  "masterthesis.out" 1719008555 0 d41d8cd98f00b204e9800998ecf8427e "pdflatex"
-  "masterthesis.run.xml" 1719008558 2400 1590a67ffdbdc89a727a06496d3c1bae "pdflatex"
-  "masterthesis.tex" 1718918656 3388 1a3d3b050246f34b1da3790d15078c72 ""
-  "masterthesis.toc" 1719008558 2083 e252035d83121e27d02c87cd8ca1d146 "pdflatex"
-  "methodology.aux" 1719008555 5564 ad289baffc6b9187af1ad6ccccdc7dd8 "pdflatex"
-  "methodology.tex" 1717708498 10014 b8c408b31a520ffbdfc632b9c71fef47 ""
-  "related_work.aux" 1719008556 4280 6b78037eeba9c3cbd4c715c7100a50f0 "pdflatex"
-  "related_work.tex" 1714902046 2004 9153b206d7152d9e4845d0e453956980 ""
+  "masterthesis.aux" 1721466440 7174 82f1dc9e0d76e8e93af9db8b4fefcb2c "pdflatex"
+  "masterthesis.bbl" 1721418359 0 d41d8cd98f00b204e9800998ecf8427e "biber masterthesis"
+  "masterthesis.out" 1721466434 0 d41d8cd98f00b204e9800998ecf8427e "pdflatex"
+  "masterthesis.run.xml" 1721466440 2400 1848bc4da31ac1293f6167714deb6b3f "pdflatex"
+  "masterthesis.tex" 1721420140 3388 1a3d3b050246f34b1da3790d15078c72 ""
+  "masterthesis.toc" 1721466440 2158 3793db194dffb536ea24953c46df8cd8 "pdflatex"
+  "methodology.aux" 1721466435 4739 f85ab45a0008362aa6e2057f7de66cd4 "pdflatex"
+  "methodology.tex" 1721465953 10007 9269ed44f1c23e4e46cdcda199ac9a7a ""
+  "related_work.aux" 1721466435 5496 f183af63cc122e3f58bba6c9cf174e0b "pdflatex"
+  "related_work.tex" 1721466425 13064 d150873a836234bdc53e7187220a6b60 ""
   "ressources/data/boxplot.csv" 1718372255 787 519a7447dc5f7bd6600c5df12532bd26 ""
   "ressources/data/boxplot2.csv" 1718388614 793 a962dfffdce3eb67873136aaadd2d495 ""
+  "ressources/data/normal_eval.csv" 1721248772 58566 34ec3199199a2b59db370e2a609627e3 ""
+  "ressources/data/normal_eval2.csv" 1721248782 58383 d5faa98f19ee15202360060ea2a698fe ""
   "ressources/data/results.csv" 1718231047 39240 fda157a6180887295fc595c087b0158c ""
   "ressources/data/results2.csv" 1714902046 57721 f9398dac7707cd586b7bc478ff5ca89b ""
   "ressources/data/results3.csv" 1714902046 58178 fd4ccd17c8f25be661e5fc6102de6f49 ""
   "ressources/data/results4.csv" 1714902046 58064 e3cbe39ad2e29228e2c47b6c07b140a9 ""
+  "ressources/images/Transformer.png" 1720785528 102232 840a52c4f8463d53d6eda9a12299318c ""
   "ressources/images/unilogo.png" 1714902046 188583 9da9274b5a36a07461b1c2a7a12f15e9 ""
   (generated)
   "_titlepage.aux"
diff --git a/masterthesis.fls b/masterthesis.fls
index 3182cd52e53e7381368ab2bd837f94582481faf3..1ea6bdba02edbd4839b40817f82e8d7945516267 100644
--- a/masterthesis.fls
+++ b/masterthesis.fls
@@ -563,24 +563,45 @@ INPUT .\methodology.tex
 INPUT .\methodology.tex
 INPUT methodology.tex
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplb8t.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplb8t.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplb8t.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplb8r.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplb8t.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplb8t.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplb8r.tfm
+INPUT .\related_work.tex
+INPUT .\related_work.tex
+OUTPUT related_work.aux
+INPUT .\related_work.tex
+INPUT .\related_work.tex
+INPUT related_work.tex
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT .\ressources\images\Transformer.png
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\ts1ppl.fd
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\ts1ppl.fd
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\ts1ppl.fd
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8c.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplr8c.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmr.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmb7t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmb7t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmb7t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplr7t.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7t.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmr10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7m.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmri.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmmi10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8r.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7m.vf
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmri.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmmi10.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8r.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8r.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7t.vf
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmr.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmr10.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplr7t.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmb7t.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmb.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmbx10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7y.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmsy10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7v.vf
@@ -588,9 +609,15 @@ INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmex10.tf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmex10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmex10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmr.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7m.vf
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmri.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmmi10.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8r.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8r.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7t.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmr.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmr10.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplr7t.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7m.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmri.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmmi10.tfm
@@ -598,13 +625,6 @@ INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8r.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7y.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmsy10.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplr7t.vf
-INPUT .\related_work.tex
-INPUT .\related_work.tex
-OUTPUT related_work.aux
-INPUT .\related_work.tex
-INPUT .\related_work.tex
-INPUT related_work.tex
 INPUT .\dataset.tex
 INPUT .\dataset.tex
 OUTPUT dataset.aux
@@ -617,16 +637,30 @@ OUTPUT experiments.aux
 INPUT .\experiments.tex
 INPUT .\experiments.tex
 INPUT experiments.tex
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8t.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8t.tfm
 INPUT ressources\data\results.csv
+INPUT ressources\data\results.csv
+INPUT ressources\data\results.csv
+INPUT ressources\data\results2.csv
 INPUT ressources\data\results2.csv
+INPUT ressources\data\results2.csv
+INPUT ressources\data\results3.csv
+INPUT ressources\data\results3.csv
 INPUT ressources\data\results3.csv
 INPUT ressources\data\results4.csv
+INPUT ressources\data\results4.csv
+INPUT ressources\data\results4.csv
 INPUT ressources\data\results.csv
 INPUT ressources\data\results.csv
 INPUT ressources\data\results.csv
-INPUT ressources\data\results.csv
+INPUT ressources\data\results2.csv
+INPUT ressources\data\results2.csv
+INPUT ressources\data\results2.csv
+INPUT ressources\data\results3.csv
+INPUT ressources\data\results3.csv
+INPUT ressources\data\results3.csv
+INPUT ressources\data\results4.csv
+INPUT ressources\data\results4.csv
+INPUT ressources\data\results4.csv
 INPUT ressources\data\boxplot.csv
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8t.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr7t.tfm
@@ -637,6 +671,8 @@ INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zpl
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmr7y.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmr7v.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmr7v.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmb7t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\zplmb7t.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\amsfonts\symbols\msam10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\amsfonts\symbols\msam10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\amsfonts\symbols\msbm10.tfm
@@ -653,29 +689,32 @@ INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\public\mathpazo\zplmr7y.vf
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\cm\cmsy10.tfm
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\public\mathpazo\fplmr.tfm
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval2.csv
+INPUT ressources\data\normal_eval2.csv
+INPUT ressources\data\normal_eval2.csv
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval.csv
+INPUT ressources\data\normal_eval2.csv
+INPUT ressources\data\normal_eval2.csv
+INPUT ressources\data\normal_eval2.csv
 INPUT .\conclusion.tex
 INPUT .\conclusion.tex
 OUTPUT conclusion.aux
 INPUT .\conclusion.tex
 INPUT .\conclusion.tex
 INPUT conclusion.tex
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8t.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplri8t.vf
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplrc8t.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\t1pcr.fd
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\t1pcr.fd
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\tex\latex\psnfss\t1pcr.fd
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\courier\pcrr8t.tfm
 INPUT .\appendix.tex
 INPUT .\appendix.tex
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplrc8t.vf
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplr8r.tfm
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\courier\pcrr8t.vf
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\courier\pcrr8r.tfm
 OUTPUT appendix.aux
 INPUT .\appendix.tex
 INPUT .\appendix.tex
 INPUT appendix.tex
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\tfm\adobe\palatino\pplri8t.tfm
+INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\vf\adobe\palatino\pplri8t.vf
 INPUT .\_eidversicherung.tex
 INPUT .\_eidversicherung.tex
 INPUT .\_eidversicherung.tex
@@ -722,8 +761,6 @@ INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\public\mathpazo\f
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\public\mathpazo\fplmr.pfb
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\public\mathpazo\fplmri.pfb
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\public\mathpazo\fplmri.pfb
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\urw\courier\ucrr8a.pfb
-INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\urw\courier\ucrr8a.pfb
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\urw\palatino\uplb8a.pfb
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\urw\palatino\uplb8a.pfb
 INPUT C:\Users\danie\AppData\Local\Programs\MiKTeX\fonts\type1\urw\palatino\uplr8a.pfb
diff --git a/masterthesis.log b/masterthesis.log
index 825a8e00ef89d769cd4edb2beffe0d4b544fcd2b..0e252a7df815121d0e785675a8d4d85d0ab77005 100644
--- a/masterthesis.log
+++ b/masterthesis.log
@@ -1,4 +1,4 @@
-This is pdfTeX, Version 3.141592653-2.6-1.40.26 (MiKTeX 24.4) (preloaded format=pdflatex 2024.5.6)  22 JUN 2024 00:22
+This is pdfTeX, Version 3.141592653-2.6-1.40.26 (MiKTeX 24.4) (preloaded format=pdflatex 2024.5.6)  20 JUL 2024 11:07
 entering extended mode
  restricted \write18 enabled.
  file:line:error style messages enabled.
@@ -975,22 +975,28 @@ LaTeX Font Info:    Trying to load font information for T1+ppl on input line 86.
 File: t1ppl.fd 2001/06/04 font definitions for T1/ppl.
 ) (masterthesis.aux (_titlepage.aux) (introduction.aux) (methodology.aux) (related_work.aux) (dataset.aux) (experiments.aux
 
-LaTeX Warning: Label `fig:enter-label' multiply defined.
+LaTeX Warning: Label `fig:prompt1' multiply defined.
 
 
-LaTeX Warning: Label `sub@fig:enter-label' multiply defined.
+LaTeX Warning: Label `sub@fig:prompt1' multiply defined.
 
 
-LaTeX Warning: Label `fig:enter-label' multiply defined.
+LaTeX Warning: Label `fig:prompt2' multiply defined.
 
 
-LaTeX Warning: Label `sub@fig:enter-label' multiply defined.
+LaTeX Warning: Label `sub@fig:prompt2' multiply defined.
 
 
-LaTeX Warning: Label `fig:enter-label' multiply defined.
+LaTeX Warning: Label `fig:prompt3' multiply defined.
 
 
-LaTeX Warning: Label `sub@fig:enter-label' multiply defined.
+LaTeX Warning: Label `sub@fig:prompt3' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompt4' multiply defined.
+
+
+LaTeX Warning: Label `sub@fig:prompt4' multiply defined.
 
 
 LaTeX Warning: Label `fig:boxplot' multiply defined.
@@ -998,6 +1004,36 @@ LaTeX Warning: Label `fig:boxplot' multiply defined.
 
 LaTeX Warning: Label `sub@fig:boxplot' multiply defined.
 
+
+LaTeX Warning: Label `fig:prompts2' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompt1' multiply defined.
+
+
+LaTeX Warning: Label `sub@fig:prompt1' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompt2' multiply defined.
+
+
+LaTeX Warning: Label `sub@fig:prompt2' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompt1' multiply defined.
+
+
+LaTeX Warning: Label `sub@fig:prompt1' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompt2' multiply defined.
+
+
+LaTeX Warning: Label `sub@fig:prompt2' multiply defined.
+
+
+LaTeX Warning: Label `fig:prompts2' multiply defined.
+
 ) (conclusion.aux) (appendix.aux))
 \openout1 = `masterthesis.aux'.
 
@@ -1212,85 +1248,121 @@ Underfull \hbox (badness 10000) in paragraph at lines 14--16
 
 ]
 chapter 2.
-[3]
+
+LaTeX Warning: Citation '10.1007/3-540-36127-8_20' on page 3 undefined on input line 4.
+
+
+LaTeX Warning: Citation 'brahman-etal-2021-characters-tell' on page 3 undefined on input line 7.
+
+
+LaTeX Warning: Citation 'schroder-etal-2021-neural' on page 3 undefined on input line 30.
+
+[3] [4]
 Underfull \hbox (badness 10000) in paragraph at lines 68--69
 
  []
 
-[4]
-Underfull \hbox (badness 10000) in paragraph at lines 73--74
 
- []
+Underfull \hbox (badness 10000) in paragraph at lines 70--71
 
+ []
 
-Underfull \hbox (badness 10000) in paragraph at lines 75--76
 
+Overfull \hbox (11.50241pt too wide) in paragraph at lines 72--74
+[]\T1/ppl/m/n/10.95 (https://huggingface.co/intfloat/e5-mistral-7b-instruct) ex-pand... To im-pro-ve upon this,
  []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 94--95
+Underfull \hbox (badness 10000) in paragraph at lines 72--74
 
  []
 
 
-Underfull \vbox (badness 10000) has occurred while \output is active []
+LaTeX Warning: Citation 'schroder-etal-2021-neural' on page 5 undefined on input line 75.
 
- [5]
-Underfull \hbox (badness 10000) in paragraph at lines 96--97
 
- []
+LaTeX Warning: Citation 'dobrovolskii-2021-word' on page 5 undefined on input line 75.
 
 
-Overfull \hbox (11.50241pt too wide) in paragraph at lines 98--100
-[]\T1/ppl/m/n/10.95 (https://huggingface.co/intfloat/e5-mistral-7b-instruct) ex-pand... To im-pro-ve upon this,
+Underfull \hbox (badness 10000) in paragraph at lines 75--76
+
  []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 98--100
+Underfull \hbox (badness 10000) in paragraph at lines 77--78
 
  []
 
-
-Underfull \hbox (badness 10000) in paragraph at lines 101--102
+[5]
+Underfull \hbox (badness 10000) in paragraph at lines 83--84
 
  []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 103--104
+Underfull \hbox (badness 10000) in paragraph at lines 85--86
 
  []
 
+) [6]
+\openout2 = `related_work.aux'.
+
+ (related_work.tex
+chapter 3.
+<ressources/images/Transformer.png, id=112, 360.74776pt x 522.15076pt>
+File: ressources/images/Transformer.png Graphic file (type png)
+<use ressources/images/Transformer.png>
+Package pdftex.def Info: ressources/images/Transformer.png  used on input line 22.
+(pdftex.def)             Requested size: 170.71652pt x 247.09998pt.
+
+LaTeX Warning: `h' float specifier changed to `ht'.
+
+
+Underfull \vbox (badness 6575) has occurred while \output is active []
+
+ [7
+
 
-Underfull \hbox (badness 10000) in paragraph at lines 109--110
+
+
+
+]
+LaTeX Font Info:    Trying to load font information for TS1+ppl on input line 35.
+ (C:\Users\danie\AppData\Local\Programs\MiKTeX\tex/latex/psnfss\ts1ppl.fd
+File: ts1ppl.fd 2001/06/04 font definitions for TS1/ppl.
+) [8 <./ressources/images/Transformer.png>] [9]
+Underfull \hbox (badness 10000) in paragraph at lines 64--65
 
  []
 
-[6]
-Underfull \hbox (badness 10000) in paragraph at lines 111--112
 
+Overfull \hbox (2.87383pt too wide) in paragraph at lines 79--80
+\T1/ppl/m/n/10.95 tasks with re-la-tive-ly small amounts of da-ta. Fine-tuning in-vol-ves ad-ju-sting the pre-trained
  []
 
-) [7]
-\openout2 = `related_work.aux'.
 
- (related_work.tex [8
+Underfull \hbox (badness 10000) in paragraph at lines 85--86
 
+ []
 
+[10]
+Underfull \hbox (badness 10000) in paragraph at lines 90--92
 
+ []
 
 
-]
-chapter 3.
-) [9]
-\openout2 = `dataset.aux'.
+Underfull \hbox (badness 10000) in paragraph at lines 94--95
 
- (dataset.tex [10
+ []
 
 
+Underfull \hbox (badness 10000) in paragraph at lines 96--97
 
+ []
 
+[11]) [12]
+\openout2 = `dataset.aux'.
 
-]
+ (dataset.tex
 chapter 4.
 
 Underfull \hbox (badness 10000) in paragraph at lines 3--5
@@ -1303,219 +1375,260 @@ Underfull \hbox (badness 10000) in paragraph at lines 6--7
  []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 8--9
+Underfull \hbox (badness 10000) in paragraph at lines 10--11
 
  []
 
+[13
 
-Underfull \hbox (badness 10000) in paragraph at lines 10--11
 
- []
 
-[11]
-Underfull \hbox (badness 10000) in paragraph at lines 44--45
 
- []
 
-) [12]
+]) [14]
 \openout2 = `experiments.aux'.
 
  (experiments.tex
 chapter 5.
 
-Underfull \hbox (badness 10000) in paragraph at lines 8--9
-
- []
+Package caption Warning: \label without proper reference on input line 1256.
+See the caption package documentation for explanation.
 
 
-Underfull \vbox (badness 10000) has occurred while \output is active []
+LaTeX Warning: Reference `fig:prompt2' on page 15 undefined on input line 8.
 
- [13
 
+Underfull \hbox (badness 10000) in paragraph at lines 8--9
 
+ []
 
+[15
 
 
-]
-Underfull \vbox (badness 10000) has occurred while \output is active []
 
- [14]
-Overfull \hbox (4.45782pt too wide) in paragraph at lines 47--50
-\T1/ppl/m/n/10.95 (https://huggingface.co/ikawrakow/mixtral-instruct-8x7b-quantized-gguf) weights and
- []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 47--50
+]
+Underfull \hbox (badness 10000) in paragraph at lines 51--53
 
  []
 
+[16]
+PGFPlots: reading {ressources/data/results.csv}
+PGFPlots: reading {ressources/data/results.csv}
 PGFPlots: reading {ressources/data/results.csv}
 
-Overfull \hbox (24.08116pt too wide) in paragraph at lines 298--298
+Overfull \hbox (35.46722pt too wide) in paragraph at lines 338--338
  [][] 
  []
 
 PGFPlots: reading {ressources/data/results2.csv}
+PGFPlots: reading {ressources/data/results2.csv}
+PGFPlots: reading {ressources/data/results2.csv}
 
-Overfull \hbox (5.12521pt too wide) in paragraph at lines 298--298
+Overfull \hbox (16.53752pt too wide) in paragraph at lines 338--338
  [][] 
  []
 
 PGFPlots: reading {ressources/data/results3.csv}
+PGFPlots: reading {ressources/data/results3.csv}
+PGFPlots: reading {ressources/data/results3.csv}
 
-Overfull \hbox (24.08116pt too wide) in paragraph at lines 298--298
+Overfull \hbox (35.4674pt too wide) in paragraph at lines 338--338
  [][] 
  []
 
 PGFPlots: reading {ressources/data/results4.csv}
+PGFPlots: reading {ressources/data/results4.csv}
+PGFPlots: reading {ressources/data/results4.csv}
 
-Overfull \hbox (5.12521pt too wide) in paragraph at lines 298--298
+Overfull \hbox (16.55107pt too wide) in paragraph at lines 338--338
  [][] 
  []
 
 
 Underfull \vbox (badness 10000) has occurred while \output is active []
 
- [15]
+ [17]
+PGFPlots: reading {ressources/data/results.csv}
+PGFPlots: reading {ressources/data/results.csv}
 PGFPlots: reading {ressources/data/results.csv}
 
-Overfull \hbox (30.26826pt too wide) in paragraph at lines 406--406
+Overfull \hbox (32.72394pt too wide) in paragraph at lines 623--623
  [][] 
  []
 
-PGFPlots: reading {ressources/data/results.csv}
+PGFPlots: reading {ressources/data/results2.csv}
+PGFPlots: reading {ressources/data/results2.csv}
+PGFPlots: reading {ressources/data/results2.csv}
 
-Overfull \hbox (11.31232pt too wide) in paragraph at lines 406--406
+Overfull \hbox (13.80394pt too wide) in paragraph at lines 623--623
  [][] 
  []
 
-PGFPlots: reading {ressources/data/results.csv}
+PGFPlots: reading {ressources/data/results3.csv}
+PGFPlots: reading {ressources/data/results3.csv}
+PGFPlots: reading {ressources/data/results3.csv}
 
-Overfull \hbox (30.26826pt too wide) in paragraph at lines 406--406
+Overfull \hbox (32.72394pt too wide) in paragraph at lines 623--623
  [][] 
  []
 
-PGFPlots: reading {ressources/data/results.csv}
+PGFPlots: reading {ressources/data/results4.csv}
+PGFPlots: reading {ressources/data/results4.csv}
+PGFPlots: reading {ressources/data/results4.csv}
 
-Overfull \hbox (11.31232pt too wide) in paragraph at lines 406--406
+Overfull \hbox (13.80394pt too wide) in paragraph at lines 623--623
  [][] 
  []
 
 
 Underfull \vbox (badness 10000) has occurred while \output is active []
 
- [16]
+ [18]
 PGFPlots: reading {ressources/data/boxplot.csv}
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.3787117e-1
-Package pgfplots info on input line 439: boxplot: got median=5.045784e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.2901382e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=3.828257620334625e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.224778294563293e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.72955322e-1
-Package pgfplots info on input line 439: boxplot: got median=5.3041077e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.4912262e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=4.249446392059326e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.148388385772705e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.30826797e-1
-Package pgfplots info on input line 439: boxplot: got median=5.1449966e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.3637848e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=3.590573072433471e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.09817624092102e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.63695297e-1
-Package pgfplots info on input line 439: boxplot: got median=5.1954956e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.3851013e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=4.185141026973724e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=5.998854041099548e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.42012329e-1
-Package pgfplots info on input line 439: boxplot: got median=5.060318e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.2866058e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=3.904823362827301e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=5.999771356582642e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.52795639e-1
-Package pgfplots info on input line 439: boxplot: got median=5.2607803e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.4694672e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=3.905723392963409e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.137956380844116e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.37993164e-1
-Package pgfplots info on input line 439: boxplot: got median=4.99127045e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.2397919e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=3.904797434806824e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.140472888946533e-1
-Package pgfplots info on input line 439: boxplot: got lower quartile=4.65387573e-1
-Package pgfplots info on input line 439: boxplot: got median=5.289238e-1
-Package pgfplots info on input line 439: boxplot: got upper quartile=5.4687805e-1
-Package pgfplots info on input line 439: boxplot: got sample size=5.0e0
-Package pgfplots info on input line 439: boxplot: got lower whisker=4.114735126495361e-1
-Package pgfplots info on input line 439: boxplot: got upper whisker=6.168897151947021e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.3787117e-1
+Package pgfplots info on input line 658: boxplot: got median=5.045784e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.2901382e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=3.828257620334625e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.224778294563293e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.72955322e-1
+Package pgfplots info on input line 658: boxplot: got median=5.3041077e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.4912262e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=4.249446392059326e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.148388385772705e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.30826797e-1
+Package pgfplots info on input line 658: boxplot: got median=5.1449966e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.3637848e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=3.590573072433471e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.09817624092102e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.63695297e-1
+Package pgfplots info on input line 658: boxplot: got median=5.1954956e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.3851013e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=4.185141026973724e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=5.998854041099548e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.42012329e-1
+Package pgfplots info on input line 658: boxplot: got median=5.060318e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.2866058e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=3.904823362827301e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=5.999771356582642e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.52795639e-1
+Package pgfplots info on input line 658: boxplot: got median=5.2607803e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.4694672e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=3.905723392963409e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.137956380844116e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.37993164e-1
+Package pgfplots info on input line 658: boxplot: got median=4.99127045e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.2397919e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=3.904797434806824e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.140472888946533e-1
+Package pgfplots info on input line 658: boxplot: got lower quartile=4.65387573e-1
+Package pgfplots info on input line 658: boxplot: got median=5.289238e-1
+Package pgfplots info on input line 658: boxplot: got upper quartile=5.4687805e-1
+Package pgfplots info on input line 658: boxplot: got sample size=5.0e0
+Package pgfplots info on input line 658: boxplot: got lower whisker=4.114735126495361e-1
+Package pgfplots info on input line 658: boxplot: got upper whisker=6.168897151947021e-1
 LaTeX Font Info:    Font shape `U/msa/m/n' will be
-(Font)              scaled to size 10.42007pt on input line 440.
+(Font)              scaled to size 10.42007pt on input line 659.
 LaTeX Font Info:    Font shape `U/msa/m/n' will be
-(Font)              scaled to size 7.91925pt on input line 440.
+(Font)              scaled to size 7.91925pt on input line 659.
 LaTeX Font Info:    Font shape `U/msb/m/n' will be
-(Font)              scaled to size 10.42007pt on input line 440.
+(Font)              scaled to size 10.42007pt on input line 659.
 LaTeX Font Info:    Font shape `U/msb/m/n' will be
-(Font)              scaled to size 7.91925pt on input line 440.
+(Font)              scaled to size 7.91925pt on input line 659.
 
 Underfull \vbox (badness 10000) has occurred while \output is active []
 
- [17]
-Underfull \hbox (badness 10000) in paragraph at lines 710--711
+ [19]
+Underfull \hbox (badness 10000) in paragraph at lines 950--951
 
  []
 
 
-Underfull \hbox (badness 10000) in paragraph at lines 712--713
+Underfull \vbox (badness 1675) has occurred while \output is active []
+
+ [20]
+Underfull \hbox (badness 10000) in paragraph at lines 952--953
 
  []
 
-[18]
-Underfull \hbox (badness 10000) in paragraph at lines 714--715
 
+LaTeX Warning: Reference `fig:enter-label' on page 21 undefined on input line 956.
+
+[21]
+PGFPlots: reading {ressources/data/normal_eval.csv}
+PGFPlots: reading {ressources/data/normal_eval.csv}
+PGFPlots: reading {ressources/data/normal_eval.csv}
+
+Overfull \hbox (35.46722pt too wide) in paragraph at lines 1109--1109
+ [][] 
  []
 
-[19]) [20]
-\openout2 = `conclusion.aux'.
+PGFPlots: reading {ressources/data/normal_eval2.csv}
+PGFPlots: reading {ressources/data/normal_eval2.csv}
+PGFPlots: reading {ressources/data/normal_eval2.csv}
 
- (conclusion.tex
-chapter 6.
-) [21
+Overfull \hbox (16.54906pt too wide) in paragraph at lines 1109--1109
+ [][] 
+ []
 
+PGFPlots: reading {ressources/data/normal_eval.csv}
+PGFPlots: reading {ressources/data/normal_eval.csv}
+PGFPlots: reading {ressources/data/normal_eval.csv}
 
+Overfull \hbox (32.72394pt too wide) in paragraph at lines 1256--1256
+ [][] 
+ []
 
+PGFPlots: reading {ressources/data/normal_eval2.csv}
+PGFPlots: reading {ressources/data/normal_eval2.csv}
+PGFPlots: reading {ressources/data/normal_eval2.csv}
 
+Overfull \hbox (13.80394pt too wide) in paragraph at lines 1256--1256
+ [][] 
+ []
+
+
+Underfull \vbox (badness 10000) has occurred while \output is active []
+
+ [22]) [23]
+\openout2 = `conclusion.aux'.
+
+ (conclusion.tex [24
 
-] [22
 
 
 
 
 ]
-chapter without number
-LaTeX Font Info:    Trying to load font information for T1+pcr on input line 109.
-(C:\Users\danie\AppData\Local\Programs\MiKTeX\tex/latex/psnfss\t1pcr.fd
-File: t1pcr.fd 2001/06/04 font definitions for T1/pcr.
-) [23]
+chapter 6.
+) [25]
+
+LaTeX Warning: Empty bibliography on input line 109.
+
 \openout2 = `appendix.aux'.
 
- (appendix.tex [24
+(appendix.tex [26
+
+
 
 
 
 ]
 chapter 7.
-) [25] [26
+) [27] [28
 
 
 
 
-] (_eidversicherung.tex) [27
+] (_eidversicherung.tex) [29
 
 
 
@@ -1527,26 +1640,34 @@ L3 programming layer <2024-03-14>
  ***********
 
 
+LaTeX Warning: There were undefined references.
+
+
 LaTeX Warning: There were multiply-defined labels.
 
 Package rerunfilecheck Info: File `masterthesis.out' has not changed.
 (rerunfilecheck)             Checksum: D41D8CD98F00B204E9800998ECF8427E;0.
+
+Package biblatex Warning: Please (re)run Biber on the file:
+(biblatex)                masterthesis
+(biblatex)                and rerun LaTeX afterwards.
+
 Package logreq Info: Writing requests to 'masterthesis.run.xml'.
 \openout1 = `masterthesis.run.xml'.
 
  ) 
 Here is how much of TeX's memory you used:
- 50351 strings out of 474424
- 1195545 string characters out of 5741740
- 3355849 words of memory out of 5000000
- 71814 multiletter control sequences out of 15000+600000
- 598827 words of font info for 124 fonts, out of 8000000 for 9000
+ 50475 strings out of 474424
+ 1197415 string characters out of 5741740
+ 3636612 words of memory out of 5000000
+ 71928 multiletter control sequences out of 15000+600000
+ 598736 words of font info for 128 fonts, out of 8000000 for 9000
  1145 hyphenation exceptions out of 8191
  108i,20n,107p,10950b,2863s stack positions out of 10000i,1000n,20000p,200000b,200000s
-<C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmex10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmmi10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmr10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmsy10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmr.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmri.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/courier/ucrr8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplb8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplr8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplr8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplri8a.pfb>
-Output written on masterthesis.pdf (31 pages, 660457 bytes).
+<C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmex10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmmi10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmr10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/amsfonts/cm/cmsy10.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmr.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/public/mathpazo/fplmri.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplb8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplr8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplr8a.pfb><C:/Users/danie/AppData/Local/Programs/MiKTeX/fonts/type1/urw/palatino/uplri8a.pfb>
+Output written on masterthesis.pdf (33 pages, 961179 bytes).
 PDF statistics:
- 370 PDF objects out of 1000 (max. 8388607)
- 69 named destinations out of 1000 (max. 500000)
- 234 words of extra memory for PDF output out of 10000 (max. 10000000)
+ 374 PDF objects out of 1000 (max. 8388607)
+ 75 named destinations out of 1000 (max. 500000)
+ 247 words of extra memory for PDF output out of 10000 (max. 10000000)
 
diff --git a/masterthesis.pdf b/masterthesis.pdf
index 6062a239ee239486ea2488e1893a8ec11f4725ce..b14e795756f25fb232802eb605372134b4b0bbdd 100644
Binary files a/masterthesis.pdf and b/masterthesis.pdf differ
diff --git a/masterthesis.run.xml b/masterthesis.run.xml
index b188ef56dcfeae60140e8cbebea252f3192ecc8d..9bed8d655459bd1778a6cce401cbbf5a7540e893 100644
--- a/masterthesis.run.xml
+++ b/masterthesis.run.xml
@@ -41,7 +41,7 @@
   >
 ]>
 <requests version="1.0">
-  <internal package="biblatex" priority="9" active="0">
+  <internal package="biblatex" priority="9" active="1">
     <generic>latex</generic>
     <provides type="dynamic">
       <file>masterthesis.bcf</file>
@@ -61,7 +61,7 @@
       <file>ngerman.lbx</file>
     </requires>
   </internal>
-  <external package="biblatex" priority="5" active="0">
+  <external package="biblatex" priority="5" active="1">
     <generic>biber</generic>
     <cmdline>
       <binary>biber</binary>
diff --git a/masterthesis.synctex(busy) b/masterthesis.synctex(busy)
new file mode 100644
index 0000000000000000000000000000000000000000..f3ef28d2e5afd74ff28aa0d29937c80d955e1ea2
Binary files /dev/null and b/masterthesis.synctex(busy) differ
diff --git a/masterthesis.synctex.gz b/masterthesis.synctex.gz
deleted file mode 100644
index 93cb481ee99d7764da94a873a174c8f545f7cce3..0000000000000000000000000000000000000000
Binary files a/masterthesis.synctex.gz and /dev/null differ
diff --git a/masterthesis.toc b/masterthesis.toc
index 7ca0521da35e39809d28b37313dea68ca4bfd3c5..90a071c01f61afbed797a7274b62791835b904e1 100644
--- a/masterthesis.toc
+++ b/masterthesis.toc
@@ -1,29 +1,30 @@
 \babel@toc {german}{}\relax 
 \contentsline {chapter}{\numberline {1}Introduction}{1}{chapter.1}%
 \contentsline {chapter}{\numberline {2}Methodology}{3}{chapter.2}%
-\contentsline {section}{\numberline {2.1}Embeddings/Token}{3}{section.2.1}%
-\contentsline {section}{\numberline {2.2}The Transformer}{3}{section.2.2}%
-\contentsline {subsection}{\numberline {2.2.1}Encoder}{3}{subsection.2.2.1}%
-\contentsline {subsection}{\numberline {2.2.2}Decoder}{3}{subsection.2.2.2}%
-\contentsline {section}{\numberline {2.3}BERT}{4}{section.2.3}%
-\contentsline {subsection}{\numberline {2.3.1}Embeddings}{4}{subsection.2.3.1}%
-\contentsline {subsection}{\numberline {2.3.2}BERTScore}{5}{subsection.2.3.2}%
-\contentsline {section}{\numberline {2.4}LLMs}{6}{section.2.4}%
-\contentsline {section}{\numberline {2.5}BLEU-Score}{6}{section.2.5}%
-\contentsline {section}{\numberline {2.6}Query generation}{6}{section.2.6}%
-\contentsline {section}{\numberline {2.7}weights quantized uniserve3r llama.cpp}{7}{section.2.7}%
-\contentsline {chapter}{\numberline {3}Related Work}{9}{chapter.3}%
-\contentsline {chapter}{\numberline {4}Gathering of literature}{11}{chapter.4}%
-\contentsline {chapter}{\numberline {5}Experiments}{13}{chapter.5}%
-\contentsline {section}{\numberline {5.1}Base Experiment}{13}{section.5.1}%
-\contentsline {subsection}{\numberline {5.1.1}Results}{16}{subsection.5.1.1}%
-\contentsline {subsection}{\numberline {5.1.2}Analysis}{18}{subsection.5.1.2}%
-\contentsline {section}{\numberline {5.2}Selected Embedded chunks}{20}{section.5.2}%
-\contentsline {chapter}{\numberline {6}Conclusion}{21}{chapter.6}%
-\contentsline {section}{\numberline {6.1}summary}{21}{section.6.1}%
-\contentsline {section}{\numberline {6.2}future work}{21}{section.6.2}%
-\contentsline {section}{\numberline {6.3}note of thanks}{21}{section.6.3}%
-\contentsline {chapter}{\nonumberline Literatur}{23}{chapter*.7}%
-\contentsline {chapter}{\numberline {7}Appendix}{25}{chapter.7}%
-\contentsline {chapter}{Eidesstattliche Versicherung}{27}{chapter*.8}%
+\contentsline {section}{\numberline {2.1}Related Work}{3}{section.2.1}%
+\contentsline {subsection}{\numberline {2.1.1}Project Gutenberg}{4}{subsection.2.1.1}%
+\contentsline {section}{\numberline {2.2}RAG}{4}{section.2.2}%
+\contentsline {section}{\numberline {2.3}Query generation}{5}{section.2.3}%
+\contentsline {chapter}{\numberline {3}Related Work}{7}{chapter.3}%
+\contentsline {section}{\numberline {3.1}Tokenization}{7}{section.3.1}%
+\contentsline {section}{\numberline {3.2}The Transformer}{7}{section.3.2}%
+\contentsline {subsection}{\numberline {3.2.1}Encoder}{8}{subsection.3.2.1}%
+\contentsline {subsection}{\numberline {3.2.2}Decoder}{9}{subsection.3.2.2}%
+\contentsline {section}{\numberline {3.3}BERT}{9}{section.3.3}%
+\contentsline {subsection}{\numberline {3.3.1}Embeddings}{9}{subsection.3.3.1}%
+\contentsline {subsection}{\numberline {3.3.2}Fine-Tuning}{10}{subsection.3.3.2}%
+\contentsline {subsection}{\numberline {3.3.3}BERTScore}{10}{subsection.3.3.3}%
+\contentsline {section}{\numberline {3.4}BLEU-Score}{11}{section.3.4}%
+\contentsline {chapter}{\numberline {4}Gathering of literature}{13}{chapter.4}%
+\contentsline {chapter}{\numberline {5}Experiments}{15}{chapter.5}%
+\contentsline {section}{\numberline {5.1}Base Experiment}{15}{section.5.1}%
+\contentsline {subsection}{\numberline {5.1.1}Results}{18}{subsection.5.1.1}%
+\contentsline {subsection}{\numberline {5.1.2}Analysis}{21}{subsection.5.1.2}%
+\contentsline {section}{\numberline {5.2}Selected Embedded chunks}{22}{section.5.2}%
+\contentsline {chapter}{\numberline {6}Conclusion}{25}{chapter.6}%
+\contentsline {section}{\numberline {6.1}summary}{25}{section.6.1}%
+\contentsline {section}{\numberline {6.2}future work}{25}{section.6.2}%
+\contentsline {section}{\numberline {6.3}note of thanks}{25}{section.6.3}%
+\contentsline {chapter}{\numberline {7}Appendix}{27}{chapter.7}%
+\contentsline {chapter}{Eidesstattliche Versicherung}{29}{chapter*.13}%
 \providecommand \tocbasic@end@toc@file {}\tocbasic@end@toc@file 
diff --git a/methodology.aux b/methodology.aux
index efe139af3e9401f468b7dae48d3ed8d1c87487e1..fc9deeefab71421447cd313c1b0d0076a33d4837 100644
--- a/methodology.aux
+++ b/methodology.aux
@@ -4,19 +4,12 @@
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
-\@writefile{toc}{\contentsline {section}{\numberline {2.1}Embeddings/Token}{3}{section.2.1}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.2}The Transformer}{3}{section.2.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.1}Encoder}{3}{subsection.2.2.1}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {2.2.2}Decoder}{3}{subsection.2.2.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.3}BERT}{4}{section.2.3}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.1}Embeddings}{4}{subsection.2.3.1}\protected@file@percent }
-\@writefile{toc}{\contentsline {subsection}{\numberline {2.3.2}BERTScore}{5}{subsection.2.3.2}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.4}LLMs}{6}{section.2.4}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.5}BLEU-Score}{6}{section.2.5}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.6}Query generation}{6}{section.2.6}\protected@file@percent }
-\@writefile{toc}{\contentsline {section}{\numberline {2.7}weights quantized uniserve3r llama.cpp}{7}{section.2.7}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {2.1}Related Work}{3}{section.2.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {2.1.1}Project Gutenberg}{4}{subsection.2.1.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {2.2}RAG}{4}{section.2.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {2.3}Query generation}{5}{section.2.3}\protected@file@percent }
 \@setckpt{methodology}{
-\setcounter{page}{8}
+\setcounter{page}{7}
 \setcounter{equation}{0}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
@@ -26,7 +19,7 @@
 \setcounter{mpfootnote}{0}
 \setcounter{part}{0}
 \setcounter{chapter}{2}
-\setcounter{section}{7}
+\setcounter{section}{3}
 \setcounter{subsection}{0}
 \setcounter{subsubsection}{0}
 \setcounter{paragraph}{0}
@@ -51,7 +44,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{2}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/methodology.tex b/methodology.tex
index 70b538a051bba229f267410e61781639e40d4e69..68061098c13d47f32753b3f5cfcf91709ac8a463 100644
--- a/methodology.tex
+++ b/methodology.tex
@@ -1,93 +1,67 @@
 \chapter{Methodology}
+\section{Related Work}
+Now first of all there already has been a decent amount of approaches for automatic text-summarization.\\
+One of the oldest and most cited papers from 2002 belongs to ``Automatic Text Summarization Using a Machine Learning Approach'' from \cite{10.1007/3-540-36127-8_20}. It describes a summarization procedure based on naive Bayes and C4.5 decision tree with different compression rates. The results where it utilizes the Naive Bayes classifier and a higher compression rate beeing more yielding better precision and recall.
 
+Creating a Characterization is quite similar to making a Summarization of character related content but could also include deductions made from the behavior of that character.
+A recent Paper from 2021 \cite{brahman-etal-2021-characters-tell} presents a dataset called LiSCU (Literary Summaries with Character Understanding) that aims to facilitate research in character-centric narrative understanding. They used techniques for Character Identification, where the goal is to identify a character's name from an anonymized description, and Character Description Generation, which involves generating a description for a given character based on a literature summary. 
 
+might exceed model limits:
+Length Truncation: Simply truncating the summary at the end.
+Coreference Truncation: Using SpanBERT to identify sentences in the summary that mention the character, focusing on these sentences.
 
-\section{Embeddings/Token}
 
 
+GPT-2: With a maximum input length of 1024 tokens.
+BART (Bidirectional and Auto-Regressive Transformers): Extended to accept up to 2048 tokens.
 
-\section{The Transformer}
-The Transformer architecture is a powerful tool in natural language processing (NLP) tasks, particularly adept at sequence-to-sequence problems like machine translation. It relies on an encoder-decoder structure to process and generate textual data.
-The encoder processes the input sequence and generates a contextualized representation for each token.
-This encoded representation is passed to the decoder.
-The decoder starts generating the output sequence one token at a time.
-At each step, it uses masked self-attention to consider previously generated tokens and encoder-decoder attention to refer back to the encoded source sentence.
-Based on this combined information, the decoder predicts the next most likely token in the output sequence.
 
-\subsection{Encoder}
+Longformer: Leveraged for its efficient encoding mechanism to handle long texts, allowing inputs up to 16,384 tokens when using the full text of books.
 
-The encoder takes an input sequence, like a sentence, and breaks it down into individual tokens (words or sub-words).
-Each token is mapped to an embedding vector, capturing its semantic meaning.
-The encoder then uses multiple encoder blocks to process the sequence.
-Each encoder block contains two sub-layers:
-Self-attention mechanism: This allows the model to understand the relationships between different words in the input sequence. It analyzes each token and assigns weights to other tokens based on their relevance. This creates a contextually rich representation for each token.
-Feed-forward neural network: This further refines the encoded representation by applying non-linear transformations.
+BLEU-4, ROUGE-n (n=1, 2), ROUGE-L F-1 scores, and BERTScore to measure similarity and quality.
 
-\subsection{Decoder}
-The decoder's role is to generate the output sequence based on the encoded representation from the encoder.
-It also uses multiple decoder blocks, similar to the encoder.
-Each decoder block has two additional sub-layers compared to the encoder block:
-Masked self-attention: Similar to the self-attention in the encoder, but it only attends to the previously generated tokens in the output sequence to avoid predicting future words based on information not yet generated.
-Encoder-decoder attention: This mechanism allows the decoder to attend to the encoded representation from the encoder, effectively looking back at the source sentence for context when generating the target sequence.
-Overall Process:
 
+performed better with length truncation
 
+Errors in coreference resolution impacted the coreference truncation performance.
 
 
 
-\section{BERT}
-BERT (Bidirectional Encoder Representations from Transformers) is a  pre-trained language representation model introduced by Devlin et al. in 2019 (ref). Its based on the Transformer architecture from\dots but instead of using in contrast to using both, an encoder and a decoder as in the original transformer, BERT only utilizes the encoder component.
-why?
+\cite*{schroder-etal-2021-neural}
+coarse-to-fine approach, which first generates coarse coreference clusters and then refines them. This method allows the model to handle the complexity of coreference resolution by breaking it down into more manageable steps.
+Two primary neural network models were developed: the base model and the large model. The large model uses the ELECTRA-large model for contextual embeddings, while the base model uses the ELECTRA-base model.
+Data Preprocessing
 
+% The models were trained on multiple datasets, including SemEval-2010, TüBa-D\\/Z, OntoNotes 5.0, and the DROC dataset. These datasets provide a diverse range of documents, which helps in training robust coreference resolution models.
+Special attention was given to handling singletons, which are mentions that do not corefer with any other mention in the document. A discard functionality was introduced to manage singletons effectively.
+Training and Evaluation:
 
-At that time, it achieved state-of-the-art results on many
-common NLP tasks such as sentiment analysis, text prediction, or named entity recognition.
+The models were trained using a variety of loss functions and optimization techniques to ensure convergence and high performance.
+The performance was measured using the CoNLL-F1 score, which is a standard metric for coreference resolution tasks.
+Results
+Performance
 
-BERT introduces two pre-training
-objectives, the masked language model objective, and the next sentence prediction objective.
-a. This pre-training involves two main tasks:
+% The coarse-to-fine models significantly outperformed previous state-of-the-art systems on both the SemEval-2010 and TüBa-D/Z datasets. The improvements were substantial, with the model achieving an increase of +25.85 F1 on SemEval-2010 and +30.25 F1 on TüBa-D\\/Z.
+% Even when compared to systems using gold mentions, which are mentions manually annotated in the dataset, the models still showed a performance increase of more than 10 F1 points.
+% Impact of Model Variations
 
-Masked Language Modeling (MLM): Predicting masked words in a sentence, considering both left and right context.
-Next Sentence Prediction (NSP): Classifying whether two sentences follow each other logically.
+% The use of the ELECTRA-large model for contextual embeddings provided a small but notable improvement over the base model, with an increase of +1.58 F1 on TüBa-D\\/Z and +1.92 F1 on SemEval-2010.
+% Different configurations and model variations were tested to analyze their impact on performance. It was found that models including a discard functionality for singletons performed better.
+% Error Analysis
 
-After pre-training, BERT can be fine-tuned for specific tasks by adding a small layer on top of the pre-trained model. This fine-tuning is much faster and cheaper than pre-training the entire model from scratch. 
+The error analysis indicated that the coarse-to-fine model generally produced accurate coreference links both locally and document-wide. However, there were frequent errors related to missed and added mentions. These errors were attributed to inconsistent training signals and the inherent complexity of coreference tasks.
+The analysis also highlighted that the model’s performance decreases as the document length increases, which aligns with previous findings in coreference resolution research.
+Visualizations and Examples:
 
-\subsection{Embeddings}
-The three matrices in BERT—token embeddings, segment embeddings, and positional embeddings—are generated as part of the model's training process. 
+The paper includes visualizations and specific examples to demonstrate the model’s predictions on unseen documents. These examples show how the model accurately predicts coreference relationships in complex sentences, validating its effectiveness in practical scenarios.
+Overall, the methods and results presented in the paper highlight the significant advancements made in coreference resolution through the use of coarse-to-fine neural network models. The study provides a comprehensive evaluation of these models, demonstrating their superiority over existing systems .
 
-For each unique Token ID (i.e. for each of the 30,522 words and subwords in the BERT Tokenizer’s vocabulary), the BERT model contains an embedding that is trained to represent that specific token. The Embedding Layer within the model is responsible for mapping tokens to their corresponding embeddings.
-
-Before a string of text is passed to the BERT model, the BERT Tokenizer is used to convert the input from a string into a list of integer Token IDs, where each ID directly maps to a word or part of a word in the original string.
-
-In addition to the Token Embeddings described so far, BERT also relies on Position Embeddings. While Token Embeddings are used to represent each possible word or subword that can be provided to the model, Position Embeddings represent the position of each token in the input sequence.
-
-The final type of embedding used by BERT is the Token Type Embedding, also called the Segment Embedding in the original BERT Paper. One of the tasks that BERT was originally trained to solve was Next Sentence Prediction. That is, given two sentences A and B, BERT was trained to determine whether B logically follows A.
-
-
-\subsection{BERTScore}
-BERTScore is an evaluation metric that utilizes the BERT model to compare texts more semantically than traditional metrics like BLEU. It leverages the contextualized embeddings provided by a pre-trained BERT model to assess the similarity between candidate and reference texts.\\
-
-The process begins by inputting both candidate and reference texts into the BERT model, which generates contextualized embeddings for each token in both texts. For each token, the similarity between its embedding and every token embedding in the comparison text is calculated using cosine similarity. This results in a similarity matrix where each entry represents the cosine similarity between the embeddings of a pair of tokens (one from the candidate sentence and one from the reference sentence).\\
-cosine similarity?
-
-The metric is computed symmetrically as follows:\\
-
-For each token embedding in the candidate sentence, find the maximum similarity score with any token embedding in the reference sentence, and average these scores across all tokens in the candidate sentence to obtain precision.\\
-
-Similarly, for each token embedding in the reference sentence, find the maximum similarity score with any token embedding in the candidate sentence, and average these scores across all tokens in the reference sentence to obtain recall.
-
-\[P_{BERT} = \frac{1}{|\hat{x}|} \sum_{\hat{x}_j\in \hat{x}} \max_{x_i \in x} x_i^T \hat{x_j} \]
-\[R_{BERT} = \frac{1}{|x|} \sum_{x_i \in x} \max_{\hat{x}_j\in \hat{x}} x_i^T \hat{x_j} \]
-
-
-
-Finally the $F_1$-score (an $F$-measure)
-is computed as the harmonic mean of precision and recall and is providing a balanced measure that considers both the model's ability to capture relevant information and its accuracy in predicting new text equally.
-
-\[F_{BERT} = 2\frac{P_{BERT}R_{BERT}}{P_{BERT} + R_{BERT}} \]
-\section{LLMs}
-\section{BLEU-Score}
+\subsection{Project Gutenberg}
+Project Gutenberg, founded in 1971 by Michael S. Hart, is one of the oldest and most extensive digital libraries, aimed at providing free access to a vast collection of over 60,000 eBooks. Hart's initiative began with the digitization of the United States Declaration of Independence, setting the stage for the project's goal of democratizing access to literature and cultural works. Named after Johannes Gutenberg, the inventor of the printing press, Project Gutenberg echoes his mission of making written works widely accessible. The Project Gutenberg Literary Archive Foundation, a non-profit organization, oversees the project's administration, legal issues, and fundraising efforts.
 
+\section{RAG}
+In contrast to their approach for Character Description Generation which required modeling long-range dependencies, I am using Retrieval-augmented generation (RAG), which is a technique to improve the quality of LLM-generated responses by grounding the model on external sources. LLMs are inconsistent in terms of producing same quality responses for each and every topic, since they knowledge is based on finite amount of information, that isn't equally distributed for every potential topic. But Retrieval-augmented generation doesn't only reduce the need for internal sources (continuous training, lowering computational and financial costs) but also ensures that the model has access to the most current, reliable facts.
+In this thesis I am primarily focusing on getting those important properties and behavior (key features) from the characters described in the literature to achieve better characterizations with grounded models that utilize this external information.
 
 
 \section{Query generation}
@@ -110,12 +84,4 @@ The generated characterizations can be evaluated both qualitatively and quantita
 
 Since language models are typically trained on extensive data, they might already contain information about certain books. To test this, we can compare queries that include key sentences to those that omit them. If the model produces the same output despite the missing key information, it suggests prior training on that data. Additionally, using books released after the model's training period ensures no pre-existing knowledge about the characters.\\
 
-Existing human-written characterizations will serve as benchmarks for assessing the model's output in terms of style, content, structure, and level of detail.
-
-
-i set up a ssh connection with the nlp-servers on the informatikum in order to be
-able to use their Llama models on their much more powerful GPUs and used HTTP-Requests
-in order to make prompts and get the results. For the weights of the model i used: 
-
-
-\section{weights quantized uniserve3r llama.cpp}
\ No newline at end of file
+Existing human-written characterizations will serve as benchmarks for assessing the model's output in terms of style, content, structure, and level of detail.
\ No newline at end of file
diff --git a/related_work.aux b/related_work.aux
index b28a198fb8bd4800aee11f809f3283e1ab5573f3..fcfaa4d7d7567bb282a59e435dd4c92b05a8573d 100644
--- a/related_work.aux
+++ b/related_work.aux
@@ -1,12 +1,22 @@
 \relax 
 \providecommand\hyper@newdestlabel[2]{}
-\@writefile{toc}{\contentsline {chapter}{\numberline {3}Related Work}{9}{chapter.3}\protected@file@percent }
+\@writefile{toc}{\contentsline {chapter}{\numberline {3}Related Work}{7}{chapter.3}\protected@file@percent }
 \@writefile{lof}{\addvspace {10\p@ }}
 \@writefile{lot}{\addvspace {10\p@ }}
 \@writefile{lol}{\addvspace {10\p@ }}
+\@writefile{toc}{\contentsline {section}{\numberline {3.1}Tokenization}{7}{section.3.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {3.2}The Transformer}{7}{section.3.2}\protected@file@percent }
+\@writefile{lof}{\contentsline {figure}{\numberline {3.1}{\ignorespaces transformer architecture from the original paper}}{8}{figure.caption.3}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.1}Encoder}{8}{subsection.3.2.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.2.2}Decoder}{9}{subsection.3.2.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {3.3}BERT}{9}{section.3.3}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.1}Embeddings}{9}{subsection.3.3.1}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.2}Fine-Tuning}{10}{subsection.3.3.2}\protected@file@percent }
+\@writefile{toc}{\contentsline {subsection}{\numberline {3.3.3}BERTScore}{10}{subsection.3.3.3}\protected@file@percent }
+\@writefile{toc}{\contentsline {section}{\numberline {3.4}BLEU-Score}{11}{section.3.4}\protected@file@percent }
 \@setckpt{related_work}{
-\setcounter{page}{10}
-\setcounter{equation}{0}
+\setcounter{page}{13}
+\setcounter{equation}{1}
 \setcounter{enumi}{0}
 \setcounter{enumii}{0}
 \setcounter{enumiii}{0}
@@ -15,19 +25,19 @@
 \setcounter{mpfootnote}{0}
 \setcounter{part}{0}
 \setcounter{chapter}{3}
-\setcounter{section}{0}
+\setcounter{section}{4}
 \setcounter{subsection}{0}
 \setcounter{subsubsection}{0}
 \setcounter{paragraph}{0}
 \setcounter{subparagraph}{0}
-\setcounter{figure}{0}
+\setcounter{figure}{1}
 \setcounter{table}{0}
 \setcounter{lstnumber}{1}
 \setcounter{section@level}{1}
 \setcounter{Item}{0}
 \setcounter{Hfootnote}{0}
 \setcounter{bookmark@seq@number}{0}
-\setcounter{caption@flags}{0}
+\setcounter{caption@flags}{2}
 \setcounter{continuedfloat}{0}
 \setcounter{subfigure}{0}
 \setcounter{subtable}{0}
@@ -40,7 +50,7 @@
 \setcounter{citetotal}{0}
 \setcounter{multicitecount}{0}
 \setcounter{multicitetotal}{0}
-\setcounter{instcount}{4}
+\setcounter{instcount}{0}
 \setcounter{maxnames}{3}
 \setcounter{minnames}{1}
 \setcounter{maxitems}{3}
diff --git a/related_work.tex b/related_work.tex
index 6a7d7c848d1890526815639fe72878635c1423a4..256dc2cb8daa0104f9d0b49d391f3f6db7454b6f 100644
--- a/related_work.tex
+++ b/related_work.tex
@@ -1,6 +1,142 @@
 \chapter{Related Work}
-Now first of all there already has been a decent amount of approaches for automatic text-summarization.
-One of the oldest and most cited papers belongs to "Automatic Text Summarization Using a Machine Learning Approach" \cite{10.1007/3-540-36127-8_20}. It describes a summarization procedure based on trainable Machine Learning algorithms. Creating a Characterization is quite similar to making a Summarization of character related content but could also include deductions made from the behavior of that character. 
-A recent Paper from 2021 \cite{brahman-etal-2021-characters-tell} presents a dataset called LiSCU (Literary Summaries with Character Understanding) that aims to facilitate research in character-centric narrative understanding. They used techniques for Character Identification, where the goal is to identify a character's name from an anonymized description, and Character Description Generation, which involves generating a description for a given character based on a literature summary. In contrast to their approach for Character Description Generation which required modeling long-range dependencies, I am using
-Retrieval-augmented generation (RAG), which is a technique to improve the quality of LLM-generated responses by grounding the model on external sources. LLMs are inconsistent in terms of producing same quality responses for each and every topic, since they knowledge is based on finite amount of information, that isn't equally distributed for every potential topic. But Retrieval-augmented generation doesn't only reduce the need for internal sources (continuous training, lowering computational and financial costs) but also ensures that the model has access to the most current, reliable facts.
-In this thesis I am primarily focusing on getting those important properties and behavior (key features) from the characters described in the literature to achieve better characterizations with grounded models that utilize this external information.
\ No newline at end of file
+
+\section{Tokenization}
+Tokens are the fundamental units of data processing in natural language processing (NLP). A token is the smallest meaningful unit of text, which can be a word, subword, or even a single character or punctuation mark. Tokenization is typically performed at one of three levels: single characters (character-based tokenization), subwords (subword-based tokenization), or whole words (word-based tokenization).
+
+In most modern NLP models, subword tokenization is predominantly used. This technique breaks words into smaller units, such as prefixes and suffixes. Unlike word-based tokenizers, which generate a very large vocabulary and suffer from a loss of meaning across very similar words as well as a large quantity of out-of-vocabulary tokens, or character-based tokenization, where each token has minimal meaning in context and the overall number of tokens in a tokenized text is enormous, subword-based tokenization seeks to find a middle ground. The idea is to decompose rare words into meaningful subwords while maintaining few to single tokens for every meaningful or frequently used word.
+
+Subword tokenizers are employed in almost every widely-used large language model (LLM) such as GPT-2, Llama 3, and in large pre-trained language models like BERT.
+
+% https://huggingface.co/docs/transformers/en/tokenizer_summary
+
+
+\section{The Transformer}
+The Transformer architecture, introduced in June 2017, marked a significant advancement in natural language processing (NLP), initially focusing on sequence-to-sequence NLP problems like machine translation tasks. However, its capabilities quickly revealed a broader potential, particularly in developing large language models (LLMs). These models are trained on vast amounts of raw text using self-supervised learning, a method where the training objective is derived automatically from the input data. After that, the model has developed a statistical understanding of the language but still needs to be improved by, e.g., masked language modeling or causal language modeling. The Transformer consists of an encoder and a decoder.
+% https://arxiv.org/abs/1706.03762
+
+
+
+\begin{figure}[h]
+    
+    \centering
+    \includegraphics[width=6cm]{ressources/images/Transformer.png}
+    \caption{transformer architecture from the original paper}
+    \end{figure}
+\subsection{Encoder}
+The encoder takes an input sequence, and breaks it down into individual tokens (words or sub-words).
+For each token an embedding vector is computed, which is a numerical representation of that token, capturing its semantic meaning.
+
+A key component of the encoder is the self-attention mechanism. Self-attention enables the model to consider the entire sequence when encoding each token, allowing it to weigh the relevance of other tokens in the input sequence dynamically. For each token, the self-attention mechanism computes attention scores that determine the influence of all other tokens in the sequence. So the generated embedded vector for each token does not only represent the token alone but also its left and right contextual influence.
+
+
+The encoder consists of multiple identical layers, or encoder blocks. Each encoder block contains two main sub-layers:
+
+\begin{itemize}
+    \item \textbf{Multi-Head Self-Attention Layer}: This sub-layer allows the model to attend to different parts of the sequence from multiple perspectives or "heads." Each head performs self-attention independently, and their outputs are concatenated and linearly transformed to provide a richer representation.
+
+    \item \textbf{Feed-Forward Layer}: After the self-attention sub-layer, each token's representation is passed through a feed-forward neural network. This layer is a simple fully connected feed-forward network applied to each position (word) in the sequence independently and identically. It consists of two linear transformations with a ReLU activation in between, allowing the model to apply non-linear transformations and further refine the encoded representation.
+\end{itemize}
+
+Both sub-layers in the encoder block are followed by residual connections and layer normalization, which help in stabilizing the training and improving convergence.
+
+\subsection{Decoder}
+The decoder works quite similarly to the encoder and can also be used for the same tasks, albeit with some loss of performance. It likewise uses multiple decoder blocks, similar to the encoder, but has two additional sub-layers per block compared to the encoder block. In the transformer's architecture, the decoder's role is to generate the output sequence based on the encoded representation from the encoder (cross-attention). This is done auto-regressively, which means that the computed feature vector, which holds information about the input sequence, is transformed by the language-modeling head into the most probable next word, which is then appended to the input text and fed back into the decoder. The most important difference to the encoder is the masked multi-head self-attention.
+
+\begin{itemize}
+    \item \textbf{Masked Multi-Head Self-Attention Layer}:
+          Since the decoder cannot predict future words based on information not yet generated, it only attends uni-directionally to the previously generated tokens in the output sequence. Therefore, only the left context (for "LTR" text) is used and the right context is masked.
+\end{itemize}
+
+
+
+
+
+\section{BERT}
+BERT (Bidirectional Encoder Representations from Transformers) is a pre-trained language representation model introduced by Devlin et al. in 2019 (ref). It is based on the Transformer architecture, but in contrast to the original Transformer, which uses both an encoder and a decoder, BERT only utilizes the encoder component. Consequently, unlike other large language models (LLMs), BERT cannot predict new tokens and thus is not suitable for text generation. Instead, it still achieves state-of-the-art results in tasks such as text classification, sentiment analysis, and named entity recognition. The attention scores are computed using queries, keys, and values derived from the input embeddings.
+
+\subsection{Embeddings}
+The three matrices in BERT—token embeddings, segment embeddings, and positional embeddings are generated as part of the model's training process.
+
+For each unique Token ID (i.e. for each of the 30,522 words and subwords in the BERT Tokenizer’s vocabulary), the BERT model contains an embedding that is trained to represent that specific token. The Embedding Layer within the model is responsible for mapping tokens to their corresponding embeddings.
+
+Before a string of text is passed to the BERT model, the BERT Tokenizer is used to convert the input from a string into a list of integer Token IDs, where each ID directly maps to a word or part of a word in the original string. In addition to the Token Embeddings described so far, BERT also relies on Position Embeddings. While Token Embeddings are used to represent each possible word or subword that can be provided to the model, Position Embeddings represent the position of each token in the input sequence.
+
+The final type of embedding used by BERT is the Token Type Embedding, also called the Segment Embedding in the original BERT Paper. One of the tasks that BERT was originally trained to solve was Next Sentence Prediction. That is, given two sentences A and B, BERT was trained to determine whether B logically follows A.\\
+
+BERT introduces two pre-training objectives, the masked language model objective (MLM), and the next sentence prediction objective (NSP).
+
+
+\begin{itemize}
+    \item \textbf{Masked Language Modeling (MLM)}:
+          15\% of the words in a sentence are randomly masked, and the model is trained to predict these masked words based on the context provided by the other words in the sentence. This enables BERT to learn bidirectional representations.
+
+    \item \textbf{Next Sentence Prediction (NSP)}:
+          To understand relationships between sentences, BERT is trained on pairs of sentences. Given two sentences, the model predicts whether the second sentence is the actual next sentence in the original text or a randomly chosen one. This task helps BERT capture the coherence and context between sentences.
+\end{itemize}
+
+
+\subsection{Fine-Tuning}
+After pre-training on large text corpora, BERT can be fine-tuned on specific downstream tasks with relatively small amounts of data. Fine-tuning involves adjusting the pre-trained model weights slightly to better fit the target task. This approach leverages the robust pre-trained language representations and adapts them to the specific requirements of the task at hand.
+
+
+
+
+\subsection{BERTScore}
+BERTScore is an evaluation metric that utilizes the BERT model to compare texts more semantically than traditional metrics like BLEU. It leverages the contextualized embeddings provided by a pre-trained BERT model to assess the similarity between candidate and reference texts.\\
+
+The process begins by inputting both candidate and reference texts into the BERT model, which generates contextualized embeddings for each token in both texts. For each token, the similarity between its embedding and every token embedding in the comparison text is calculated using cosine similarity
+\begin{equation}
+    \cos(\theta) = \frac{\mathbf{A} \cdot \mathbf{B}}{\|\mathbf{A}\| \|\mathbf{B}\|} = \frac{\sum_{i=1}^{n} \mathbf{A}_{i} \mathbf{B}_{i} }{\sqrt{\sum_{i=1}^{n} \mathbf{A}_{i}^{2}} \cdot \sqrt{\sum_{i=1}^{n} \mathbf{B}_{i}^{2}} }
+\end{equation}
+This results in a similarity matrix where each entry represents the cosine similarity between the embeddings of a pair of tokens (one from the candidate sentence and one from the reference sentence).\\
+
+
+The metric is computed symmetrically as follows:\\
+
+For each token embedding in the candidate sentence, find the maximum similarity score with any token embedding in the reference sentence, and average these scores across all tokens in the candidate sentence to obtain precision.\\
+
+Similarly, for each token embedding in the reference sentence, find the maximum similarity score with any token embedding in the candidate sentence, and average these scores across all tokens in the reference sentence to obtain recall.
+
+\[P_{BERT} = \frac{1}{|\hat{x}|} \sum_{\hat{x}_j\in \hat{x}} \max_{x_i \in x} x_i^T \hat{x_j} \]
+\[R_{BERT} = \frac{1}{|x|} \sum_{x_i \in x} \max_{\hat{x}_j\in \hat{x}} x_i^T \hat{x_j} \]
+
+
+
+Finally the $F_1$-score (an $F$-measure)
+is computed as the harmonic mean of precision and recall and is providing a balanced measure that considers both the model's ability to capture relevant information and its accuracy in predicting new text equally.
+
+\[F_{BERT} = 2\frac{P_{BERT}R_{BERT}}{P_{BERT} + R_{BERT}} \]
+
+\section{BLEU-Score}
+
+BLEU-Score is a different metric I use in my thesis for comparing texts. BLEU does not evaluate and compare the semantics of the reference and candidate text, but instead compares the similarity of their vocabulary.
+
+Let $\left\{y^{1}, y^{2}, ..., y^{N}\right\}$ be the words of the reference text and $\left\{\hat{y}^{1}, \hat{y}^{2}, ..., \hat{y}^{N}\right\}$ the words of the candidate text.
+
+
+The first step is to create n-grams $\text{G}_n(y)$ for both texts. An n-gram is just a set of consecutive words of length n in a text.
+
+\[
+    \text{G}_n(y) = \left\{y_1, y_2, ..., y_k\right\}
+\]
+
+Next we define the function $\text{C}(s,y)$ that counts the appearances of s as a substring in y.
+Now we can count n-grams of the candidate that appear in the reference text. We can compute the clipped precision by taking the minimum of the appearances of the n-gram in $y$ and $\hat{y}$ and then dividing by the amount of all occurences of n-grams in $\hat{y}$. Therefor candidates that have the same n-gram repeating over and over again don't get a higher precision score if the same n-gram does not appear in the reference text the same amount.
+
+\[
+    \text{p}_n(\hat{y} , y) = \frac{\sum_{s \in G_n(\hat{y})} \min(\text{C}(s,\hat{y}), \text{C}(s,y))}{\sum_{s \in G_n(\hat{y})} \text{C}(s,\hat{y})}
+\]
+
+
+Right now, short candidate texts are more likely to get a good score even though the reference text is much longer. Therefore, we add a brevity penalty in order to give higher scores to candidate texts whose length is closer to (or even exceeds) the length of the reference text.
+\[
+    \text{BP}(c, r) = \left\{\begin{array}{lr}
+        1,               & \text{if } c > r    \\
+        \ e^{(1 - r/c)}, & \text{if } c \leq r \\
+    \end{array}\right\}
+\]
+
+Finally, for the BLEU-Score we combine the brevity penalty with the clipped precision of n-grams. We additionally add a weight vector to weigh each $\text{p}_n$ by $w_n$, making it possible to give n-grams of different lengths $n$ a different impact on the overall result. In practice, however, most BLEU-Scores use a uniform distribution with $N = 4$, so that $w_n$ always stays $\frac{1}{4}$.
+
+\[\text{BLEU} = \text{BP}(c, r) \cdot \exp\left(\sum_{n=1}^{N}  \text{w}_n \cdot \ln(p_n)\right)\]
+
diff --git a/ressources/data/normal_eval.csv b/ressources/data/normal_eval.csv
new file mode 100644
index 0000000000000000000000000000000000000000..71eb7be100928cf28928deed5b6fe9f4c6158a32
--- /dev/null
+++ b/ressources/data/normal_eval.csv
@@ -0,0 +1,541 @@
+Book,Character,F1,h,wF1,wh
+The Lord of the Rings,Gimli,0.5303179025650024,0.0018958562640058354,0.5486525297164917,0.007278140322566057
+The Lord of the Rings,Elrond,0.5225498080253601,2.1405124896213987,0.5052364468574524,2.5439314310177656
+The Lord of the Rings,Arwen,0.5495935678482056,0.09176155749094073,0.5545570254325867,0.43261731214681853
+The Lord of the Rings,Smaug,0.49510568380355835,1.369934426713461,0.5086314678192139,1.0657703146783388
+The Lord of the Rings,Frodo Baggins,0.45463496446609497,1.7198854026994104,0.4846213757991791,1.250914683541555
+The Lord of the Rings,Samwise Gamgee,0.5255295634269714,0.00021279833202228525,0.5717642307281494,0.0005201358644656519
+The Lord of the Rings,Bilbo Baggins,0.4629913568496704,0.7427334673811882,0.4710036516189575,0.7643137653901765
+The Lord of the Rings,Gandalf,0.493848979473114,0.7690076168971675,0.48975974321365356,2.593182433548461
+The Lord of the Rings,Ancalagon,0.477145254611969,0.01104810389980852,0.5791114568710327,2.0878972244703298
+The Lord of the Rings,Shadowfax,0.558815598487854,0.22685585749271228,0.5979445576667786,1.6379879568324147
+The Lord of the Rings,Aragorn II,0.514411449432373,1.2754050915206687,0.49165078997612,1.2727238428741576
+The Lord of the Rings,Balin,0.5572898387908936,6.873337123824756e-05,0.5653225183486938,0.010761507796556791
+The Lord of the Rings,Bard,0.5038110613822937,0.00444677265710345,0.5618695616722107,0.006853507291056744
+The Lord of the Rings,Beorn,0.4574282765388489,7.964547045271094e-06,0.6069895625114441,0.5130365078530041
+The Lord of the Rings,Boromir,0.567672073841095,0.026939631327486366,0.542649507522583,0.031440763873515874
+The Lord of the Rings,Celeborn,0.5386183857917786,8.960520794475933e-07,0.5627967715263367,0.002457179271147332
+The Lord of the Rings,Celebrimbor,0.5008432269096375,0.30550488193474995,0.5453246831893921,4.317380873481824
+The Lord of the Rings,Denethor II,0.5466106534004211,0.1223507951938338,0.5836129784584045,0.3042194992237594
+The Lord of the Rings,Éomer,0.5421885848045349,0.01706438066626261,0.5727627873420715,0.07651650067203127
+The Lord of the Rings,Éowyn,0.5354081392288208,0.04466707846071376,0.5591710209846497,0.08089813447606473
+The Lord of the Rings,Faramir,0.49136117100715637,0.8622361224149409,0.4823136627674103,1.0867150159587093
+The Lord of the Rings,Galadriel,0.4945986270904541,1.1028929795254774,0.498959481716156,2.6947653102196303
+The Lord of the Rings,Gollum,0.5312132835388184,0.0012175108716184046,0.5351508259773254,0.00026087581220733615
+The Lord of the Rings,Gríma,0.5651068687438965,0.026308131149234124,0.5924745798110962,0.06891192018784154
+The Lord of the Rings,Halbarad,0.5482879281044006,0.03772402240858696,0.5678908228874207,1.2505198519541443
+The Lord of the Rings,Isildur,0.5536698698997498,0.00014058355552045226,0.5754685997962952,0.0019145160582926945
+The Lord of the Rings,Fíli and Kíli,0.535077691078186,0.03906636833126219,0.6048795580863953,0.15158815490377478
+The Lord of the Rings,Legolas,0.4352049231529236,0.46794684410900483,0.4515453577041626,1.3105177750316803
+The Lord of the Rings,Lúthien,0.42408517003059387,1.3768888823489491e-10,0.5941258072853088,0.14628311080819084
+The Lord of the Rings,Maedhros,0.5035333037376404,2.6845660125638523e-09,0.5534624457359314,0.0007071572139962113
+The Lord of the Rings,Melian,0.47052446007728577,0.0005430458619563076,0.5702692866325378,1.151224722893621
+The Lord of the Rings,Meriadoc Brandybuck,0.561441957950592,0.0648408954241231,0.5652663707733154,0.9112354140815887
+The Lord of the Rings,Peregrin Took,0.5437358021736145,0.05427278725195254,0.5529078841209412,0.1864921831796264
+The Lord of the Rings,Radagast,0.5174330472946167,0.0001749764985500619,0.5598633289337158,0.003216700358738188
+The Lord of the Rings,Saruman,0.4741404950618744,0.8696719536245657,0.4654025435447693,1.2235656275977627
+The Lord of the Rings,Sauron,0.47842350602149963,1.6349855311995525,0.473971426486969,3.107573119386748
+The Lord of the Rings,Shelob,0.5592805743217468,0.07584053063781951,0.5853018164634705,0.34060079450478153
+The Lord of the Rings,Théoden,0.5443832278251648,0.07420759137582812,0.5772528052330017,0.18079678755351167
+The Lord of the Rings,Thingol,0.5081369876861572,4.428451715613721e-05,0.5225947499275208,0.005517349793246728
+The Lord of the Rings,Thranduil,0.4878326654434204,1.3411993964995826e-27,0.5475040078163147,0.003206411266392512
+The Lord of the Rings,Thrór,0.4825860559940338,7.743723490880978e-06,0.5413334965705872,0.24044709661082322
+The Lord of the Rings,Thorin II,0.47796374559402466,1.9999888091541057e-19,0.5533391833305359,1.7823173898844698e-06
+The Lord of the Rings,Tom Bombadil,0.5839139819145203,3.449736913810358e-05,0.574947714805603,0.0004803173822170903
+The Lord of the Rings,Treebeard,0.5894725322723389,0.037430463655111455,0.5587906837463379,0.6738879374362026
+The Lord of the Rings,Túrin,0.49322354793548584,1.9729607394382167e-10,0.4993710517883301,2.1667083816749173e-05
+The Lord of the Rings,Ungoliant,0.5248897075653076,0.006889886168928653,0.55659019947052,0.4206666712385319
+The Lord of the Rings,Morgoth,0.45803698897361755,1.168327412862539,0.49801531434059143,1.836967770407093
+The Lord of the Rings,Watcher in the Water,0.5586118102073669,0.0005535213703505581,0.5981280207633972,0.005104543353622818
+The Lord of the Rings,Gil-galad,0.49370089173316956,2.8875881748037804e-14,0.5870011448860168,0.004151112707898806
+The Lord of the Rings,Círdan,0.4970964193344116,1.1359558026426888e-15,0.5533652901649475,0.0023556687549475264
+The Lord of the Rings,Khamûl,0.5273112654685974,0.3111839518759664,0.5779586434364319,1.5488645441375017
+The Lord of the Rings,Thráin II,0.4682380259037018,6.821370762956645e-09,0.5730043649673462,0.022696313322523375
+The Lord of the Rings,Glaurung,0.49256759881973267,0.010504363729391671,0.5601674318313599,0.5209951886978874
+The Lord of the Rings,Haldir,0.5636138319969177,0.3369018557603518,0.5684061646461487,0.9064526162103486
+The Lord of the Rings,Eärendil,0.551388680934906,0.022742724622693146,0.6047641038894653,0.4545111505499469
+The Lord of the Rings,Glorfindel,0.5649725198745728,2.2217178591704628e-06,0.5694261789321899,0.003514182136364015
+The Lord of the Rings,Gothmog,0.5325398445129395,0.18188013587276972,0.543887734413147,0.9029937752524297
+The Lord of the Rings,Beregond,0.5386419296264648,2.183722265159242,0.5324291586875916,3.6924625443719
+The Lord of the Rings,Berúthiel,0.42371782660484314,2.8041899631891774e-18,0.5517565011978149,0.097690752263447
+The Lord of the Rings,Turgon,0.47251227498054504,0.0942599245494071,0.5610125064849854,0.5115538130022425
+The Lord of the Rings,Elendil,0.5107694864273071,0.00011445544869784365,0.5504311323165894,0.018159865037368727
+The Lord of the Rings,Beren,0.4936663806438446,0.0013168984976569065,0.5609504580497742,0.2964469737914797
+The Lord of the Rings,Finrod,0.4899004101753235,1.891110828027117e-08,0.5585615634918213,0.07706352324734694
+The Lord of the Rings,Fingolfin,0.47078388929367065,3.1593558430299197e-24,0.5603159070014954,0.0006270312675563591
+The Lord of the Rings,Fingon,0.48401978611946106,0.00046293403360183726,0.5613618493080139,0.2697719276485892
+The Lord of the Rings,Gilraen,0.4580308794975281,0.004944468498240691,0.5713173747062683,3.87055853005025
+The Lord of the Rings,Morwen,0.5053345561027527,0.00025010015622384794,0.5570681691169739,1.0455979320774023
+The Lord of the Rings,Idril,0.5022085309028625,0.24011716128773278,0.5832176208496094,2.986286610186788
+The Lord of the Rings,Glóin,0.5324459671974182,0.09565166899337406,0.5635282397270203,2.9661555721979833
+The Lord of the Rings,Ecthelion,0.4401785731315613,0.0001392145475183653,0.44803524017333984,0.49961228195695695
+The Lord of the Rings,Forlong,0.5688490271568298,0.5602252154217972,0.5871270895004272,4.106717452924327
+The Lord of the Rings,Húrin,0.46524012088775635,6.992488981995174e-06,0.5215151906013489,0.006185180459347364
+The Lord of the Rings,Huor,0.46776753664016724,0.06732621508894855,0.5642777681350708,3.353537717036954
+The Lord of the Rings,Lindir,0.4819457530975342,0.5342962044084377,0.5809340476989746,6.041185807057358
+The Lord of the Rings,Erestor,0.565351665019989,3.485846912651825,0.586430549621582,4.9764824193494395
+The Lord of the Rings,Elladan and Elrohir,0.572724461555481,0.088177189742486,0.6065909266471863,0.6361087823379934
+The Lord of the Rings,Glóredhel,0.4629199206829071,0.8536328538441629,0.49852535128593445,1.6292637426017123
+The Lord of the Rings,Eöl,0.4756379723548889,0.0004635115474672892,0.5275198221206665,0.29199451623786776
+The Lord of the Rings,Erendis,0.4563486874103546,0.07722385243961652,0.5786224007606506,1.7736845457585755
+The Lord of the Rings,Fëanor,0.4401758313179016,0.6147414238801957,0.4709019958972931,0.8933878134903198
+The Lord of the Rings,Aredhel,0.4630291759967804,0.0006045999411397055,0.5278006792068481,0.5368354153736876
+The Lord of the Rings,Caranthir,0.5198797583580017,0.14574014133635002,0.5769110918045044,1.924008518023196
+The Lord of the Rings,Curufin,0.48771682381629944,1.9260558979402237e-05,0.5782928466796875,0.636370822793572
+The Lord of the Rings,Amras,0.4840487539768219,0.07831594780818153,0.576225221157074,4.422656924847655
+The Lord of the Rings,Amrod,0.4492732584476471,0.0033548290563057627,0.5318185687065125,0.5419955258409143
+The Lord of the Rings,Amlach,0.46319329738616943,1.0475817682608122,0.5794084072113037,4.618059174231231
+The Lord of the Rings,Angrod,0.4995581805706024,0.6376961769341987,0.5718408226966858,7.675932767872278
+The Lord of the Rings,Aegnor,0.4741942882537842,0.002581700694746893,0.5661616325378418,2.077555700438521
+The Lord of the Rings,Handir,0.4704637825489044,1.4118870664589966,0.5968661308288574,13.082637572002591
+The Lord of the Rings,Hareth,0.48880234360694885,1.208471555754901,0.5098549127578735,2.0283251634894808
+The Lord of the Rings,Hador,0.47143059968948364,0.004290333985831063,0.5834735631942749,2.677397512827921
+The Lord of the Rings,Haleth,0.5201858878135681,0.2793668133256367,0.5622618198394775,4.641685740897057
+The Lord of the Rings,Baragund,0.5109097361564636,0.9868341704482895,0.5133917331695557,1.245646248915543
+The Lord of the Rings,Barahir,0.5018305778503418,0.20837525189564643,0.5540673732757568,1.8961209541827748
+The Lord of the Rings,Baran,0.4734276533126831,2.0087748000837657,0.49618831276893616,2.401055689983004
+The Lord of the Rings,Belemir,0.4510476291179657,0.18053203988050995,0.5369394421577454,3.028884621598889
+The Lord of the Rings,Celegorm,0.4814887046813965,0.018810964614706677,0.5747490525245667,2.4956213743481626
+The Lord of the Rings,Carcharoth,0.5387466549873352,0.2734467447511363,0.5697872042655945,2.109973094597627
+The Lord of the Rings,Gelmir,0.46213892102241516,0.22106298007615877,0.5793829560279846,4.179069700546927
+The Lord of the Rings,Galdor,0.5158555507659912,1.2485904053333363,0.5514740943908691,2.435062548697424
+The Lord of the Rings,Gwindor,0.4787231385707855,0.0009883692054017572,0.5721768736839294,2.4641428150898292
+The Lord of the Rings,Eilinel,0.4771568775177002,0.964949530269156,0.48966920375823975,1.2176584163368669
+The Lord of the Rings,Finduilas,0.45200368762016296,0.0022644301662218405,0.5315022468566895,0.8535603185410245
+The Lord of the Rings,Rían,0.451631635427475,0.1739800001262709,0.5608115196228027,2.382336701816836
+The Lord of the Rings,Nienor,0.4857916533946991,0.060125919803259285,0.5431424379348755,1.1567590469088522
+The Lord of the Rings,Mablung,0.4288769066333771,8.810829254277746e-06,0.5587544441223145,0.2949886572847551
+Harry Potter,Nymphadora Tonks,0.4425010085105896,0.4811867125932654,0.4619460105895996,0.6509497252838561
+Harry Potter,Dolores Umbridge,0.42945197224617004,1.5444118006253529,0.4487933814525604,4.498057380957531
+Harry Potter,Fleur Delacour,0.45403963327407837,0.44788354843643563,0.4549606740474701,0.8759612594302569
+Harry Potter,Sirius Black,0.4285387396812439,0.4475858904917092,0.4315895736217499,0.5084066666657757
+Harry Potter,Remus Lupin,0.4528217017650604,0.5047202210524844,0.4453299343585968,4.25963952666337
+Harry Potter,Minerva McGonagall,0.37532657384872437,0.3901990986916779,0.44724151492118835,0.833021457282659
+Harry Potter,Neville Longbottom,0.44247835874557495,3.249958889477173,0.4544924199581146,3.233556544168879
+Harry Potter,Helga Hufflepuff,0.5581740736961365,0.7075363770838006,0.5667545199394226,0.6325972114081228
+Harry Potter,Albus Dumbledore,0.4365445673465729,1.646849726307027,0.46606212854385376,3.3627657322972975
+Harry Potter,Cedric Diggory,0.4603066146373749,1.953592998779285,0.47734349966049194,1.8800989140419708
+Harry Potter,Cho Chang,0.4494101405143738,1.524332607130559,0.45505595207214355,1.7492911733909369
+Harry Potter,Rufus Scrimgeour,0.5175845623016357,4.594655846928269e-05,0.569431483745575,0.00011339995568707924
+Harry Potter,Arabella Figg,0.5579614639282227,0.011121597022581963,0.5527403950691223,0.019994300211661695
+Harry Potter,Sybill Trelawney,0.47237637639045715,3.123903345780508,0.4761386513710022,2.961366799932051
+Harry Potter,Rita Skeeter,0.4344606101512909,0.5426442446824509,0.4388675093650818,1.2415302305645854
+Harry Potter,Padma Patil,0.5182434916496277,0.0007101262299604671,0.5555219054222107,0.004473801713360513
+Harry Potter,Myrtle Warren,0.4512809216976166,4.066192632689464,0.4595744013786316,4.842941977938914
+Harry Potter,Peter Pettigrew,0.4263714849948883,1.7698315018924273,0.42987990379333496,2.9141204185400382
+Harry Potter,Bellatrix Lestrange,0.4379557967185974,0.9308910645023083,0.45057129859924316,0.7353979797059612
+Harry Potter,Alastor Moody,0.45778852701187134,0.7827104312739529,0.4531765580177307,2.274414280204072
+Harry Potter,Gilderoy Lockhart,0.43641695380210876,2.777551319897083,0.4366215765476227,3.1657195994917373
+Harry Potter,Newton Scamander,0.464682012796402,0.4607415647030731,0.45841389894485474,0.9050845747461256
+Harry Potter,Garrick Ollivander,0.532930850982666,0.00010177577866362249,0.566840410232544,5.247644224131636e-05
+Harry Potter,Pomona Sprout,0.4468885362148285,3.3633300002109787,0.4492199420928955,4.737450432005136
+Harry Potter,Cornelius Fudge,0.46478158235549927,0.940017520552656,0.4509239196777344,0.9367586104780342
+Harry Potter,Filius Flitwick,0.4493355453014374,3.36218792065598,0.4472354054450989,3.477780886766946
+Harry Potter,Dobby,0.45319893956184387,0.9744792504328931,0.45798707008361816,1.3303329283501248
+Harry Potter,Igor Karkaroff,0.40930604934692383,0.5704136732410355,0.42897629737854004,1.6171682675288797
+Harry Potter,Viktor Krum,0.49080294370651245,0.8105020259222853,0.47479119896888733,1.3773008658498516
+Harry Potter,William Weasley,0.44394412636756897,0.7589835810847033,0.453458696603775,1.1929303400411908
+Harry Potter,Parvati Patil,0.5387401580810547,8.606392441026713e-05,0.5289773941040039,1.990425812657324e-07
+Harry Potter,Dean Thomas,0.46051159501075745,1.521156044271354,0.44844067096710205,1.828053777490866
+Harry Potter,Katie Bell,0.5273134112358093,2.134555659548249e-05,0.5541184544563293,6.941469957752831e-05
+Harry Potter,Vincent Crabbe,0.5198566317558289,0.0006169246388136355,0.5248391628265381,0.0007503214476635897
+Harry Potter,Seamus Finnigan,0.45278045535087585,1.5254552835269635,0.4320342540740967,1.6758501032135402
+Harry Potter,Cormac McLaggen,0.5400038957595825,0.008215100433180482,0.530128002166748,0.00022086646044714914
+Harry Potter,Tom Riddle,0.4586656987667084,3.764603371707189,0.46474525332450867,4.146790110438455
+Harry Potter,Rubeus Hagrid,0.4429548978805542,0.757989256856053,0.4778842329978943,4.257511023281331
+Harry Potter,Nagini,0.41359958052635193,0.5475663139218998,0.410442590713501,0.6242335554914112
+Harry Potter,Ronald Weasley,0.44660866260528564,0.5717791795437712,0.4628472924232483,0.8329823782588229
+Harry Potter,Severus Snape,0.43073543906211853,1.4891996565686652,0.4365817606449127,2.645417860134607
+Harry Potter,Bartemius Crouch Senior,0.46095919609069824,2.7993025941645246,0.4624888002872467,4.843919675574981
+Harry Potter,Gabrielle Delacour,0.5198922157287598,9.240374791603473e-05,0.522936224937439,0.5014047977107204
+Harry Potter,Gellert Grindelwald,0.4071548879146576,0.711321998168285,0.4479026198387146,0.8825099327605731
+Harry Potter,Phineas Nigellus Black,0.42723262310028076,2.7866178748037788e-06,0.4604507386684418,0.00018032316359153637
+Harry Potter,Ludovic Bagman,0.5430501699447632,0.001734150790713027,0.5344049334526062,0.004227243623881541
+Harry Potter,Quirinus Quirrell,0.4647876024246216,6.197999937825962,0.4513610303401947,4.256786855713364
+Harry Potter,Hermione Granger,0.45246046781539917,0.704830134611921,0.44010305404663086,1.4039408463334415
+Harry Potter,Salazar Slytherin,0.5373809933662415,0.03488702033927176,0.5692950487136841,0.019697282842836516
+Harry Potter,Aberforth Dumbledore,0.4465094804763794,0.8878197052937502,0.47251880168914795,1.7522845718208322
+Harry Potter,Horace Slughorn,0.46476292610168457,3.7932150707817085,0.4678252041339874,4.518161437856791
+Harry Potter,Walden Macnair,0.5117467045783997,1.2047489451247158e-05,0.5371495485305786,0.007327422354681301
+Harry Potter,Ariana Dumbledore,0.5188639760017395,8.0965247623839e-10,0.5321075916290283,0.00019561825594728447
+Harry Potter,Irma Pince,0.5216736793518066,4.029566537738145e-16,0.5734816789627075,4.72127559997257e-05
+Harry Potter,Nicolas Flamel,0.5795202255249023,1.868579567554499e-06,0.5633584260940552,0.02848685774920913
+Harry Potter,Amos Diggory,0.5196719765663147,0.00035460509840973075,0.5461400747299194,0.001872444270886754
+Harry Potter,Armando Dippet,0.551811695098877,2.1802730091762353e-06,0.5678954720497131,0.001735623483430903
+Harry Potter,Lucius Malfoy,0.4325801134109497,0.6023787388627473,0.4152710437774658,0.5443745786128857
+Harry Potter,Hedwig,0.5569161176681519,0.006520389328859798,0.5601760745048523,0.04978959720192771
+Harry Potter,Alice Longbottom,0.5214443802833557,2.3711789744590406e-11,0.5788318514823914,0.27642309862441233
+Harry Potter,Pansy Parkinson,0.5118330717086792,1.7750677772365438e-05,0.5277064442634583,2.0328018267129606e-05
+Harry Potter,Kingsley Shacklebolt,0.5434129238128662,1.3996009562103873e-05,0.5604909658432007,0.00015719097894292212
+Harry Potter,Aurora Sinistra,0.5493923425674438,4.575075219822373e-06,0.5767828822135925,0.0038494379796005693
+Harry Potter,Septima Vector,0.43766435980796814,1.8845288587265717e-08,0.5453755855560303,0.03724184578896214
+Harry Potter,Nicholas de Mimsy-Porpington,0.44390130043029785,5.066341605216925,0.44866496324539185,3.394059705016283
+Harry Potter,Draco Malfoy,0.44385403394699097,0.45574509118632806,0.45091286301612854,1.890818492286858
+Harry Potter,Luna Lovegood,0.46110299229621887,1.6356681988599642,0.47391244769096375,1.9060559187027577
+Harry Potter,Ginevra Weasley,0.4370097517967224,0.26195475010326386,0.44011276960372925,0.4853234646966717
+Harry Potter,Poppy Pomfrey,0.5473053455352783,5.7536034507271794e-12,0.5645301938056946,3.1497502321319218e-06
+Harry Potter,Peeves,0.44132596254348755,2.0093993245955892,0.4392738342285156,2.2806503103401905
+Harry Potter,Oliver Wood,0.5217692255973816,2.299237431116857e-08,0.5709604620933533,2.8742628421028316e-05
+Harry Potter,Godric Gryffindor,0.5335870385169983,0.07218693586896838,0.6060956716537476,0.41742541115881143
+Harry Potter,Crookshanks,0.5731525421142578,0.008041975463329309,0.5619288086891174,0.023092071923452865
+Harry Potter,Rosmerta,0.5611692667007446,0.004808990933461774,0.5528600811958313,0.00520787205974206
+Harry Potter,Angelina Johnson,0.44134780764579773,0.48176915655976954,0.46099355816841125,1.61538573750937
+Harry Potter,Winky,0.5464995503425598,4.597214267087058e-05,0.5603477358818054,0.0001176181450069733
+Harry Potter,Rose Granger-Weasley,0.4612981677055359,1.6717813257322007e-11,0.49385079741477966,0.002333520795934221
+Harry Potter,Hugo Granger-Weasley,0.5482012033462524,0.03363222661957224,0.48715612292289734,0.09785017943562745
+Harry Potter,Petunia Dursley,0.42131248116493225,0.7796112827905537,0.43235060572624207,0.8727334192666802
+Harry Potter,Broderick Bode,0.5348936319351196,0.0005206032918184488,0.5531675219535828,0.38682106521043347
+Harry Potter,Augustus Rookwood,0.5375701785087585,0.1818283262725174,0.5363628268241882,0.2594773430935967
+Harry Potter,Piers Polkiss,0.4831104576587677,0.5206655673674491,0.4402565062046051,0.06160381724160381
+Harry Potter,Fawkes,0.5352731943130493,0.004980275060277477,0.5684974193572998,0.04184621414237621
+Harry Potter,Bartemius Crouch Junior,0.4790416359901428,3.9225961309581856,0.48085489869117737,3.138785448592562
+Harry Potter,Molly Weasley,0.4258531928062439,1.006931637473603,0.44171324372291565,1.5205089674980166
+Harry Potter,Pius Thicknesse,0.5047450661659241,0.0003785633524771387,0.5550447702407837,0.0025702108153827508
+Harry Potter,Harry Potter,0.45031216740608215,0.5536328923594419,0.4723129868507385,0.9263211683672427
+Harry Potter,Aragog,0.5313587188720703,7.815331152240772e-06,0.5461622476577759,0.07215125730281978
+Harry Potter,Mosag,0.5209478139877319,0.8554017628732911,0.43287116289138794,0.30213059508233125
+Harry Potter,James Potter II,0.4469856917858124,0.3051987092976389,0.45458748936653137,0.10387878389073239
+Harry Potter,Arthur Weasley,0.42488688230514526,0.4389978643295181,0.4442940950393677,2.36208392954761
+Harry Potter,Buckbeak,0.5568949580192566,0.0696037045558207,0.5765940546989441,0.1567409913475783
+Harry Potter,Ernest Prang,0.548900842666626,2.0732609423809656,0.5068053603172302,0.5240040239717413
+Harry Potter,Amycus Carrow,0.5384577512741089,0.008523144300981564,0.5470284223556519,0.0038905295842837848
+Harry Potter,Hepzibah Smith,0.5609691739082336,0.0009797427720681254,0.5640297532081604,0.015184323939183082
+Harry Potter,Amelia Bones,0.5153607726097107,2.3357874013152878e-05,0.5390540361404419,0.040136349683452345
+Harry Potter,Anthony Goldstein,0.4705437421798706,7.131851725246233e-06,0.5429233312606812,0.6083868480040763
+Harry Potter,Corban Yaxley,0.5089466571807861,6.34883607311603e-13,0.5473292469978333,0.00015252815004084406
+Harry Potter,Dilys Derwent,0.45186612010002136,0.07435721796537552,0.5533308982849121,2.874254418566477
+Harry Potter,Hokey,0.5817021727561951,0.07076265448179796,0.5338616371154785,0.16100695057801653
+Harry Potter,Penelope Clearwater,0.5376611948013306,0.015703203144467864,0.5196546316146851,0.30624024675731476
+Harry Potter,Selwyn,0.42949238419532776,0.004148160962221695,0.430673748254776,0.005607489792257706
+Harry Potter,Terry Boot,0.5016686916351318,1.3608514695211e-07,0.5328243970870972,0.004405688318323371
+Harry Potter,Zacharias Smith,0.5279580354690552,0.03420986984971019,0.538486659526825,0.03987487051034301
+Harry Potter,Stanley Shunpike,0.5450069904327393,0.0069697550755501315,0.5373027920722961,0.0051688833761868724
+Harry Potter,Ernest Macmillan,0.5565010905265808,0.00035789251302877155,0.43433448672294617,1.1702825280145455e-19
+Harry Potter,Eloise Midgen,0.513272762298584,0.11258254027765241,0.5407871603965759,0.7941201720850346
+Harry Potter,Griphook,0.5510381460189819,0.14518296678263054,0.54579097032547,0.1008488629812858
+Harry Potter,Alicia Spinnet,0.5427551865577698,0.0001363405761430288,0.5892655849456787,0.0011085423982836683
+Harry Potter,Ignotus Peverell,0.5293282270431519,0.0024470436681450123,0.5542071461677551,0.049894835566507825
+Harry Potter,Lee Jordan,0.43696144223213196,0.8313975125985401,0.4533984065055847,1.7652504446066097
+Harry Potter,Reginald Cattermole,0.5222092270851135,0.8745552978924823,0.5025945901870728,0.14685186584471002
+Harry Potter,Wilhelmina Grubbly-Plank,0.5482054948806763,0.046089023591390284,0.5839771032333374,0.37566003612806453
+Harry Potter,Rowena Ravenclaw,0.5359760522842407,0.06865955289653697,0.573421061038971,0.4260412068984685
+Harry Potter,Herbert Beery,0.43231260776519775,0.021471357550109614,0.4358270466327667,0.07358243796213755
+Harry Potter,Justin Finch-Fletchley,0.5083823204040527,5.515536337718215e-07,0.5567641854286194,0.0004953535160609951
+Harry Potter,Elphias Doge,0.5131416916847229,0.0011731634868792163,0.5358070731163025,0.00019686852138109848
+Harry Potter,Hannah Abbott,0.4692113697528839,1.7761814432280333e-20,0.49926626682281494,5.949654625532152e-06
+Harry Potter,Marcus Flint,0.4707847833633423,9.189966458818742e-05,0.554602324962616,0.3285182087549822
+Harry Potter,Millicent Bulstrode,0.4880232810974121,0.00038728854416620603,0.5334551334381104,0.0697731638859217
+Harry Potter,Marietta Edgecombe,0.5397696495056152,1.3486482752683525e-06,0.5401628613471985,2.143600595977801e-05
+Harry Potter,Evan Rosier,0.4763492941856384,0.08524887915393586,0.5510381460189819,4.197853513057502
+Harry Potter,Fat Friar,0.5001595616340637,0.004926166062170269,0.5600280165672302,0.012869258972551438
+Harry Potter,Rolanda Hooch,0.48695138096809387,6.53191056925866e-21,0.5390515923500061,8.221566846057004e-08
+Harry Potter,Antioch Peverell,0.5457143187522888,0.3247452655676739,0.5060713887214661,0.08578294498642179
+Harry Potter,Astoria Malfoy,0.505161702632904,0.0008781553040230637,0.5340600609779358,0.3197984887298726
+Harry Potter,Cadmus Peverell,0.5331827998161316,0.1617363232068362,0.5127511620521545,0.24283007612691213
+Harry Potter,Merope Riddle,0.5360749959945679,3.931058007897491e-07,0.5302800536155701,1.035925837468078e-05
+Harry Potter,Dexter Fortescue,0.5083675384521484,0.02672858527497318,0.49927106499671936,0.19074108690719113
+Harry Potter,Susan Bones,0.5229329466819763,0.0002700267654533279,0.5438013672828674,2.3579569809623674e-06
+Harry Potter,Silvanus Kettleburn,0.45456430315971375,6.237655734272488e-08,0.5587272047996521,0.017171066029079543
+Harry Potter,Antonin Dolohov,0.5568044781684875,0.00017311428809943535,0.5817202925682068,0.03348079697290573
+Harry Potter,Frank Longbottom,0.5779850482940674,0.00040616553274287553,0.5645447969436646,0.3153786102234897
+Harry Potter,Michael Corner,0.46743306517601013,1.4306645318480637e-06,0.5304694771766663,0.001905437274061067
+Harry Potter,Vernon Dursley,0.4193305969238281,0.6117686237027634,0.4480687379837036,2.5333829039030915
+Harry Potter,Marlene McKinnon,0.5041496157646179,0.8111850455575621,0.45006251335144043,0.12483473886513989
+Harry Potter,Dorcas Meadowes,0.46392571926116943,0.1643726245459285,0.5783253908157349,2.474595182981317
+Harry Potter,Emeric the Evil,0.5178278684616089,1.2622184001968104,0.5628010630607605,4.550841299645501
+Harry Potter,Dennis Creevey,0.49776142835617065,5.717898920479274e-05,0.5503764748573303,0.11136043735329025
+Harry Potter,Mykew Gregorovitch,0.5690000653266907,0.11857131016550881,0.5642809271812439,0.1498485329867405
+Harry Potter,George Weasley,0.44311073422431946,0.5884252857003849,0.4324892461299896,0.7008562267288053
+Harry Potter,Fred Weasley,0.43166789412498474,0.47100963562066334,0.44750624895095825,0.7332427461184158
+Harry Potter,Gregory Goyle,0.5400614738464355,5.886176560879137e-06,0.5341871976852417,0.0024968706670244105
+Harry Potter,Galatea Merrythought,0.4920767545700073,0.0032699201976819185,0.5274026393890381,0.9164211439946987
+Harry Potter,Victoire Weasley,0.4587174355983734,0.023478896559807866,0.49942678213119507,0.8306536323484959
+Harry Potter,Fat Lady,0.4970676898956299,0.0006078717408122774,0.5686796307563782,0.023751923435151718
+Harry Potter,Dominique Weasley,0.542873203754425,0.1360867317508552,0.48697128891944885,0.6925961895519774
+Harry Potter,Beedle the Bard,0.5292668342590332,4.020141019267585,0.5423824787139893,1.8524892553876358
+Harry Potter,Ignatius Prewett,0.46911752223968506,0.6497960800492384,0.4978139400482178,2.7852354204178047
+Harry Potter,Sirius Black I,0.4658055901527405,1.0827814871578363,0.46781203150749207,1.6251472078374127
+Harry Potter,Regulus Black I,0.44861671328544617,1.2077823151106382,0.4680110812187195,4.497858006635839
+Harry Potter,Louis Weasley,0.540837287902832,0.2192415943009566,0.48211774230003357,0.5860506227625341
+Harry Potter,Abraxas Malfoy,0.5586199760437012,1.1152304716334613,0.5188573002815247,2.809235649753128
+Harry Potter,Sirius Black II,0.421613872051239,1.1245359777324464,0.43757230043411255,2.385812775782805
+Harry Potter,Irma Crabbe,0.47812893986701965,0.03487666868803237,0.4259966313838959,1.2116468203241268
+Harry Potter,Porpentina Goldstein,0.4264792203903198,0.07032617444297437,0.47799986600875854,6.230131971321948
+Harry Potter,Radolphus Lestrange,0.47446632385253906,0.7665334276008785,0.5084900259971619,1.0012827648862979
+The Hunger Games,Gale Hawthorne,0.4092412292957306,0.38520219820231394,0.42561009526252747,1.1059587461436282
+The Hunger Games,Katniss Everdeen,0.4497995972633362,1.4265105776332534,0.47483837604522705,2.195442424563712
+The Hunger Games,Effie Trinket,0.5815386772155762,3.0792937880716245e-06,0.5591163039207458,1.2126429404944133e-06
+The Hunger Games,Coriolanus Snow,0.4087497889995575,0.3796436125093209,0.4526492655277252,1.5742954966856622
+The Hunger Games,Mrs. Everdeen,0.49565452337265015,2.9906462789126636e-05,0.5162612199783325,0.00011422808928675895
+The Hunger Games,Cinna,0.40385836362838745,0.8810996757287762,0.40217864513397217,1.5850030403767033
+The Hunger Games,Primrose Everdeen,0.5412285327911377,1.2830647151305e-07,0.5635352730751038,1.0822123731785479e-07
+The Hunger Games,Peeta Mellark,0.44384264945983887,1.1291179617171474,0.4579137861728668,1.2622192568833872
+The Hunger Games,Haymitch Abernathy,0.43163391947746277,1.134473813765434,0.4425918161869049,1.229173845922611
+The Hunger Games,Alma Coin,0.48114219307899475,2.95861523989604e-20,0.5374076962471008,0.0030204198098382515
+The Hunger Games,Casca Highbottom,0.48641085624694824,0.00539279226809264,0.4324418902397156,2.4006125507516925e-06
+The Hunger Games,Livia Cardew,0.4411150813102722,0.0011644289971937268,0.44976624846458435,0.018320461370184046
+The Hunger Games,Palmyra Monty,0.4756408631801605,2.723208990680631,0.5241082310676575,3.4981564388107538
+The Hunger Games,Persephone Price,0.48968610167503357,0.022044170826383953,0.5048912763595581,0.5750454742309209
+The Hunger Games,Festus Creed,0.48046550154685974,0.0004364852600695281,0.5087020397186279,0.007702797102961257
+The Hunger Games,Vipsania Sickle,0.46680545806884766,0.00914089311208026,0.4577060341835022,0.2628891810579304
+The Hunger Games,Hilarius Heavensbee,0.5234611630439758,2.0099604890001523,0.5248605608940125,0.9069424409920499
+The Hunger Games,Domitia Whimsiwick,0.4541220963001251,7.349920196184377e-05,0.45936286449432373,0.07436539044729262
+The Hunger Games,Clemensia Dovecote,0.5008890628814697,1.6014022585481102e-05,0.4638417661190033,4.811672253580727e-08
+The Hunger Games,Felix Ravinstill,0.48538246750831604,0.2378235069555312,0.4536113142967224,0.3712447120353729
+The Hunger Games,Lysistrata Vickers,0.4519827663898468,7.61478911561517e-07,0.4958251416683197,0.0011258190365234862
+The Hunger Games,Lucretius Flickerman,0.5081245303153992,1.4047005526211074e-06,0.5309379696846008,0.37400776220902393
+The Hunger Games,Reaper Ash,0.47439125180244446,0.00042856144723453125,0.49196308851242065,0.007660071244621089
+The Hunger Games,Mizzen,0.42592114210128784,1.2010382943299564e-06,0.44694557785987854,1.8699396361217116e-07
+The Hunger Games,Coral,0.4286750257015228,4.980235540597977e-07,0.4535006880760193,4.3197941769261226e-08
+The Hunger Games,Treech,0.5075646638870239,0.117626392781118,0.4749631881713867,0.0002962613432383224
+The Hunger Games,Dill,0.4356289803981781,0.0002891011143852142,0.4885352551937103,0.09602760983610494
+The Hunger Games,Mr. Everdeen,0.523754358291626,0.08714885212106858,0.5184605121612549,0.13979226686194846
+The Hunger Games,Volumnia Gaul,0.49472540616989136,6.869168631180911e-07,0.48181095719337463,9.132064293881348e-05
+Dune,Jamis,0.5443645715713501,0.08811188744360916,0.5378723740577698,0.14729622920499957
+Dune,Glossu Rabban Harkonnen,0.5374839901924133,2.346730239086161,0.5518556237220764,3.595008864263438
+Dune,Hwi Noree,0.5402935743331909,0.9546559571623305,0.5494601726531982,0.328316116884358
+Dune,Kailea Vernius,0.4088099002838135,3.3352686262241886e-07,0.5558974742889404,0.6014721844717049
+Dune,Helena Atreides,0.5068652033805847,0.11190350163380693,0.5462532639503479,0.3454311405400564
+Dune,Piter de Vries,0.46393299102783203,0.7901284138439427,0.5120517611503601,1.5202778799386392
+Dune,Ghanima Atreides,0.49605461955070496,0.4661478620566122,0.507136881351471,0.6183888868445536
+Dune,Farad'n Corrino,0.5614867806434631,0.9435758372206554,0.5808011889457703,1.2893259880279828
+Dune,Fafnir Corrino,0.49481943249702454,1.3196266092732591,0.5108835101127625,1.3455776093671192
+Dune,Dominic Vernius,0.4647168517112732,0.001586983294263305,0.5251268744468689,0.2164528169324232
+Dune,Faykan Butler,0.4672413170337677,0.0012918776867090426,0.45807644724845886,0.03459192430146977
+Dune,Thufir Hawat,0.5250611901283264,0.05386468296074758,0.5541090965270996,0.03488485236259507
+Dune,Kwisatz Haderach,0.562592089176178,0.20928503842604138,0.5722053050994873,0.8231619434607415
+Dune,Duncan Idaho,0.4214058220386505,1.0528955818498733,0.44391417503356934,2.1972041209773283
+Dune,Paul Atreides,0.477653831243515,1.4512494341597177,0.4664964973926544,1.4550534154014099
+Dune,Wellington Yueh,0.5041899085044861,0.42575377315702645,0.5070567727088928,0.2773105966965404
+Dune,Miles Teg,0.5735767483711243,0.5392183891587967,0.5745278596878052,0.3328531911163241
+Dune,Vladimir Harkonnen,0.4916495680809021,0.0011839907151858548,0.4781716763973236,0.015072736941780063
+Dune,Darwi Odrade,0.5599603652954102,0.004385374813780918,0.5430006980895996,0.001411389953756776
+Dune,Wensicia Corrino,0.5699851512908936,1.352215822644708,0.5664901733398438,1.7340149761303842
+Dune,Irulan Corrino,0.5094133019447327,0.013496032750195345,0.5159376263618469,0.07794179160711645
+Dune,Gurney Halleck,0.5254309177398682,0.021007851028187967,0.5530193448066711,0.07635120457093703
+Dune,Shaddam Corrino IV,0.4789293706417084,0.11210779791742681,0.47419947385787964,0.12945228495210676
+Dune,Leto Atreides II,0.43324440717697144,1.0353937872588983,0.4626670479774475,1.236763637671174
+Dune,Lucilla,0.5374746918678284,2.789838649157089,0.5433661341667175,4.2178043356399195
+Dune,Margot Fenring,0.5632591843605042,0.0901246651570515,0.5906767249107361,1.5331309572850005
+Dune,Murbella,0.5306856632232666,0.11611535783474038,0.5490666627883911,0.311518693349403
+Dune,Octa Butler,0.4596191346645355,0.026051839291221703,0.5119488835334778,0.9994224406668474
+Dune,Mohandas Suk,0.4551697373390198,0.27207357729137027,0.5508387684822083,1.4002093038690777
+Dune,Pardot Kynes,0.5781580805778503,0.667386054851426,0.5367116928100586,0.43639916783868365
+Dune,Raquella Berto-Anirul,0.5147691369056702,0.05050924728578873,0.5281753540039062,0.2529816771656182
+Dune,Norma Cenva,0.49413955211639404,1.279041748836781,0.5010496973991394,0.8907502084443788
+Dune,Norma Cevna,0.49869275093078613,0.29399185573892656,0.5146554112434387,1.1904821789395426
+Dune,Gaius Helen Mohiam,0.526836097240448,1.1550383866306306,0.5425923466682434,1.538226544406715
+Dune,Vorian Atreides,0.4723348021507263,0.7620755642897297,0.4491121768951416,1.0658716977179965
+Dune,Tlaloc,0.42855411767959595,0.011831545159601529,0.5015847682952881,0.9321172489174155
+Dune,Hayt,0.5439981818199158,3.035260854687782,0.5676808953285217,1.8890437314339483
+Dune,Rhombur Vernius,0.48807913064956665,0.04032262338387167,0.5621006488800049,0.4523142109796731
+Dune,Scytale,0.4897126257419586,0.4730752657490815,0.5085275173187256,0.5772822226475126
+Dune,Serena Butler,0.512681782245636,0.003478693947487364,0.5410799384117126,0.0026481038198425105
+Dune,Victor Atreides,0.4980299174785614,0.2910350286240949,0.5519011616706848,0.6685790952560978
+Dune,Shando Vernius,0.48915979266166687,0.1598593751666299,0.5207884311676025,0.4678524818639373
+Dune,Xavier Harkonnen,0.5057855248451233,0.000162317814965326,0.5143126249313354,0.043210444377176686
+Dune,Zufa Cenva,0.46705758571624756,0.011210431960996917,0.5524097681045532,0.3717184737716069
+Dune,Schwangyu,0.542707085609436,2.5322962807125675,0.45930904150009155,1.3245332225874222
+Dune,Siona Atreides,0.5410712957382202,0.5836693720482895,0.5667243003845215,0.14155065450061385
+Dune,Tylwyth Waff,0.5741361379623413,5.170779834619342,0.45839226245880127,0.1531923656668821
+Dune,Anirul Corrino,0.5136567950248718,1.876669671550019,0.5680137276649475,2.9275031075382563
+Dune,Sheeana Brugh,0.5340063571929932,0.9987176212057695,0.5581178069114685,2.533198754228731
+Dune,Stilgar,0.5065039992332458,0.3521997169623891,0.532167375087738,0.5102105680812854
+Dune,Hasimir Fenring,0.547031044960022,3.7819609171032647,0.5571539402008057,1.9510502823437563
+Dune,Leto Atreides I,0.4644840657711029,0.15613359356622647,0.49512797594070435,0.44035739619037206
+Dune,Minotauros Atreides,0.48161420226097107,0.16044323979093533,0.5114316344261169,0.39850084889227566
+Dune,Paulus Atreides,0.5753435492515564,0.7760784009369951,0.5783288478851318,0.6385111652421047
+Dune,Abulurd Harkonnen,0.5403276085853577,2.0133328786031646,0.5230582356452942,2.1064208263373607
+Dune,Alma Mavis Taraza,0.5075234174728394,0.6880329098557795,0.4995601177215576,0.2924849668415627
+Dune,Jessica Atreides,0.4570485055446625,0.144439164385024,0.47339460253715515,0.5619218432663521
+Dune,Chani Kynes,0.4593413472175598,0.765399971568981,0.48829561471939087,0.8681908027060506
+Dune,Feyd-Rautha Harkonnen,0.46087339520454407,1.219083792235549,0.4707494080066681,0.8714558397648315
+Dune,Alia Atreides,0.4658108353614807,0.33354382715178293,0.46704867482185364,0.19408789445886362
+Dune,Liet Kynes,0.5218409895896912,1.2782253296381967,0.5362000465393066,0.7457589774633218
+Dune,Moneo Atreides,0.5322074890136719,0.12216043027038535,0.552431046962738,0.03764658907125225
+Dune,Tio Holtzman,0.45090925693511963,5.580479411464467e-05,0.5041067600250244,0.006506435897014626
+Dune,Tyros Reffa,0.42983078956604004,0.13278622337086818,0.49830710887908936,1.1056474255429347
+Dune,Harah,0.4979162812232971,0.07253534945532525,0.5090550184249878,0.005811578891020777
+Dune,Elrood Corrino IX,0.4714115262031555,1.3470757706131942,0.5571887493133545,3.313130230889499
+Dune,Jehanne Butler,0.43893149495124817,0.028819476431780285,0.4444078803062439,0.051261321341301264
+Dune,Ramallo,0.4325161278247833,0.06871613995148869,0.501629650592804,1.8724518413711042
+Dune,Lady Jessica,0.468407541513443,0.2028299003667993,0.46810829639434814,0.31718526941437075
+Dune,Leto I Atreides,0.504253625869751,0.009952028120910663,0.5509100556373596,0.11957200314127743
+Dune,Glossu Rabban,0.5381055474281311,2.142668830782992,0.5276147723197937,1.6565587691391979
+Dune,Piter De Vries,0.5243019461631775,1.1976779929079377,0.524216890335083,1.2231867428278729
+Dune,Mapes,0.5485497713088989,2.672092425420998,0.48810961842536926,0.6152978619282995
+Dune,Liet-Kynes,0.5388694405555725,0.7744858351693394,0.5246624946594238,0.6364821227227504
+Dune,Otheym,0.5519803762435913,1.9190665607807564,0.488323450088501,0.41751516667183514
+Dune,Korba,0.4905959963798523,1.9028732015359178,0.4283231496810913,0.07611581087956468
+Dune,Aramsham,0.40584757924079895,0.8238137072393069,0.4876619875431061,0.9168460982084708
+Dune,Iakin Nefud,0.5762370824813843,3.774862952073259,0.5554256439208984,1.3726203168044029
+Dune,Edric,0.46370890736579895,1.4745048017739721,0.49676525592803955,1.1583227412688253
+Dune,Bijaz,0.5214598178863525,0.27487984239498103,0.5157041549682617,0.2647359742327518
+Dune,Lichna,0.4485637843608856,1.328797911591771,0.44648897647857666,0.7906917182421546
+Dune,Farok,0.5331273078918457,2.0201251314036,0.5111566185951233,0.8885105448392211
+Dune,Tyekanik,0.5045052170753479,0.8605449177709994,0.5404890775680542,1.138484106188053
+Dune,Ziarenko Javid,0.5213062167167664,0.7372080900214711,0.41952967643737793,0.1327686030114251
+Dune,Tertius Eileen Anteac,0.5371912717819214,1.3095758687498087,0.4741029441356659,0.06576978119434776
+Dune,Nayla,0.5458073019981384,1.1268773215155374,0.5432273745536804,1.7074506944108572
+Dune,Quintinius Violet Chenoeh,0.5333386659622192,0.0006095256747923344,0.4813452661037445,2.004955329972975e-05
+Dune,Marcus Claire Luyseyal,0.5409387946128845,5.817468506746241,0.47500813007354736,1.6040966020982437
+Dune,Bellonda,0.5828205943107605,3.054110317840824,0.592597246170044,3.6912129527072532
+Dune,Alef Burzmali,0.5161446928977966,1.0354349672963867,0.5568660497665405,1.6205280729583174
+Dune,Hedley Tuek,0.48375484347343445,1.07362282470827,0.5231477618217468,1.5332853064468446
+Dune,Marty,0.4924050271511078,0.9328987492488379,0.5335819125175476,1.646799387111592
+Dune,Daniel,0.5007641315460205,1.631312666915827,0.5122811198234558,1.1045714636700712
+Dune,Dama,0.4857421815395355,1.715304963446476,0.4941137433052063,0.6593159783259328
+Dune,Logno,0.49544182419776917,0.8754907556427473,0.4983513653278351,0.6225424791607346
+Twilight,Bella Swan,0.43482106924057007,0.4455320014066238,0.4403236210346222,1.0944117164519866
+Twilight,Edward Cullen,0.4453941583633423,0.678335766560001,0.455565869808197,1.2448036249161023
+Twilight,Jacob Black,0.4697536826133728,0.9226002690842543,0.4790249466896057,1.3640223465042558
+Twilight,Carlisle Cullen,0.46029287576675415,0.7512274707328349,0.48388591408729553,0.6457266717530008
+Twilight,Esme Cullen,0.469201922416687,0.837341342683267,0.4519073963165283,0.7246252844484066
+Twilight,Alice Cullen,0.43883439898490906,0.27762205177539756,0.4547623097896576,0.6615864714911912
+Twilight,Emmett Cullen,0.49038809537887573,0.6683083856310319,0.48034539818763733,1.7738577229534076
+Twilight,Rosalie Hale,0.43875133991241455,0.4878305133247904,0.4652736186981201,1.1990501138523448
+Twilight,Jasper Hale,0.45648518204689026,1.4435958440038834,0.45468655228614807,0.9914565227561849
+Twilight,Renesmee Cullen,0.43302661180496216,0.6311994326354685,0.4501892328262329,0.6259689675029592
+Twilight,Aro,0.4243939220905304,0.46061224121091704,0.43787676095962524,0.6814359492698534
+Twilight,Caius,0.5134718418121338,0.004125867393607857,0.524796724319458,0.0036320872433338205
+Twilight,Marcus,0.5064879059791565,0.00013564352237404047,0.5018938779830933,9.792731027764251e-06
+Twilight,Jane,0.5082261562347412,0.0001358791583441206,0.5344328284263611,1.017502052202691e-05
+Twilight,Alec,0.528445303440094,0.00116804313733483,0.515385627746582,2.8397931685995056e-05
+Twilight,Demetri,0.5613125562667847,0.0012281161893588296,0.5580273866653442,0.004024489540010405
+Twilight,Felix,0.5359938740730286,0.0005091215912758393,0.43852245807647705,1.5489576813631933e-10
+Twilight,Heidi,0.44545361399650574,1.0930883456035096e-07,0.4894866943359375,0.01844956558105942
+Twilight,Santiago,0.4405513107776642,0.011309682447928997,0.4655430316925049,0.021841682771676845
+Twilight,James,0.4592633545398712,2.1330016673097883e-08,0.5340688228607178,0.0005231826868600952
+Twilight,Victoria,0.439033567905426,0.6875448458491541,0.4483987092971802,0.7681029253884348
+Twilight,Laurent,0.5089563727378845,0.0012824595639473507,0.5217602252960205,0.02861783674096133
+Twilight,Riley Biers,0.5060179233551025,5.8316056860725515e-05,0.5028755068778992,0.0007274335295509815
+Twilight,Bree Tanner,0.5183275938034058,2.5557105349398566e-06,0.5305123329162598,4.429474834340514e-05
+Twilight,Mexican coven,0.5069307088851929,0.036235190696521355,0.46717697381973267,0.08806643208242962
+Twilight,Zafrina,0.5180833339691162,0.056121174900539444,0.5461707711219788,0.17186884231675187
+Twilight,Senna,0.5209641456604004,0.1763051382946032,0.4448743462562561,0.016242516603751367
+Twilight,Kachiri,0.5447333455085754,0.9279802981879801,0.4272400140762329,0.04809800954766655
+Twilight,Peter,0.5418391227722168,0.0016953537711623685,0.5076555609703064,1.2035564024001924e-05
+Twilight,Charlotte,0.4985171854496002,0.02629600614364144,0.4373162090778351,7.300929998184094e-06
+Twilight,Mary,0.5204862356185913,1.4050623647905562,0.5231702923774719,1.023507485669433
+Twilight,Randall,0.4445645213127136,0.21137629749761944,0.46512487530708313,0.18699848209366737
+Twilight,Eleazar,0.5085576176643372,0.023583436881051658,0.47188693284988403,2.4494355156982036e-05
+Twilight,Carmen,0.5116759538650513,0.008318089735211892,0.4351828396320343,4.641500438336512e-07
+Twilight,Tanya,0.49432075023651123,2.3001461533403244e-07,0.5129669904708862,1.0170264820004679e-06
+Twilight,Kate,0.48860788345336914,0.000418882395198394,0.501785933971405,3.64939274318443e-05
+Twilight,Garrett,0.4939342439174652,0.09748755652474841,0.47069159150123596,7.563791265191502e-06
+Twilight,Sasha,0.4354710578918457,0.0038431673822077795,0.5104660987854004,1.085126858305472
+Twilight,Vasilii,0.41973641514778137,0.0006704062509159768,0.42285558581352234,0.05885283015416893
+Twilight,Irina,0.4988650679588318,0.00777066092583459,0.5048238039016724,0.0002198406798161469
+Twilight,Tia,0.5152443051338196,8.281582664851085e-06,0.4440155327320099,2.1940759158160988e-07
+Twilight,Amun,0.5048414468765259,0.03634249466666956,0.4782496392726898,0.00011413806783015471
+Twilight,Benjamin,0.5442827343940735,0.00021993008951214533,0.4993562400341034,3.5617777723742515e-06
+Twilight,Kebi,0.5474962592124939,0.013636178321943443,0.5137883424758911,0.06685787326882119
+Twilight,Nahuel,0.5094461441040039,0.012278268845722654,0.4904480576515198,1.6070452396395075e-05
+Twilight,Huilen,0.5564388036727905,0.4284589161086818,0.503710150718689,0.017689843612317367
+Twilight,Sam Uley,0.46574684977531433,0.7360231784276465,0.46713006496429443,0.9117944123315557
+Twilight,Quil Ateara V,0.5100683569908142,0.1399546709235932,0.561724066734314,0.04476182157961291
+Twilight,Embry Call,0.5151413679122925,0.021446235885536503,0.5060939192771912,0.00930585974176835
+Twilight,Paul Lahote,0.5231663584709167,0.0037034459416706335,0.45771995186805725,9.125731580496854e-11
+Twilight,Jared Cameron,0.5240994691848755,0.018601466544971176,0.532151997089386,0.006780010158484718
+Twilight,Leah Clearwater,0.5293238162994385,8.166267693146855e-05,0.518864095211029,6.573006431824589e-06
+Twilight,Seth Clearwater,0.5219919085502625,2.6690509572979773e-05,0.5368380546569824,2.255735781664663e-08
+Twilight,Collin Littlesea,0.40156927704811096,0.3932931368504643,0.43680256605148315,0.3934013293951621
+Twilight,Brady Fuller,0.4072178900241852,0.4258262646188319,0.37133386731147766,0.12471716492845157
+Twilight,Ephraim Black,0.49252963066101074,0.3658229524051135,0.5327481627464294,0.7108240690539922
+Twilight,Charlie Swan,0.44749635457992554,0.6579660924865371,0.42621874809265137,1.7776992499067172
+Twilight,Harry Clearwater,0.5476050972938538,0.04389439564074397,0.5528291463851929,0.9881387357176821
+Twilight,Billy Black,0.5193422436714172,0.0001874504207676882,0.5135833621025085,2.3514188956747863e-06
+Twilight,Tyler Crowley,0.5633716583251953,1.140472973049515,0.554145872592926,0.49037170026777693
+Twilight,Lauren Mallory,0.452099472284317,0.08813459510465665,0.4402539134025574,0.010352699627859435
+Twilight,Mike Newton,0.5422773957252502,0.022807163141619256,0.5342537760734558,0.011984005660149604
+Twilight,Jessica Stanley,0.5055686235427856,0.00037801822421734265,0.5701385736465454,0.004160155633692188
+Twilight,Angela Weber,0.5451338291168213,0.017936963699077213,0.5681806206703186,0.008009266249548834
+Twilight,Eric Yorkie,0.5205509662628174,0.0642821243823755,0.5471195578575134,0.1757533347555896
+Twilight,Emily Young,0.5085493326187134,7.213279249480357e-06,0.5158069729804993,0.03302552158717686
+Twilight,Sue Clearwater,0.5386261343955994,0.2446133009175692,0.5701732635498047,0.07040580264026068
+Twilight,Quil Ateara III,0.5075376033782959,1.6239529934461778,0.5255942344665527,1.253521363478485
+Twilight,Rachel Black,0.4674018919467926,0.001908030312325637,0.4487326741218567,0.0005745818831719202
+Twilight,Rebecca Black,0.40655308961868286,0.09442198445734402,0.4780859351158142,1.0728798273423112
+Twilight,J. Jenks,0.5524166822433472,0.2216549059922535,0.43941813707351685,0.00015883031212545344
+The Hitchhiker's Guide to the Galaxy,Zaphod Beeblebrox,0.524462878704071,3.4262332847912296,0.5250093936920166,2.6664367028483587
+The Hitchhiker's Guide to the Galaxy,Arthur Dent,0.5273870825767517,0.4291602688773915,0.514234721660614,0.40709212137192696
+The Hitchhiker's Guide to the Galaxy,Marvin,0.4958357512950897,0.2549632048619335,0.5185239911079407,0.551938654927262
+The Hitchhiker's Guide to the Galaxy,Trillian,0.541327714920044,0.2824537261145331,0.5415677428245544,0.10505347154828759
+The Hitchhiker's Guide to the Galaxy,Ford Prefect,0.503355860710144,0.0006953983515379682,0.5187045335769653,0.00024207269074722943
+The Hitchhiker's Guide to the Galaxy,Fenchurch,0.4917847216129303,0.3875229487300913,0.529032826423645,3.2876831458105626
+The Hitchhiker's Guide to the Galaxy,Deep Thought,0.5529919266700745,11.519914392508579,0.5442690849304199,6.210687877411387
+The Hitchhiker's Guide to the Galaxy,Eddie the Computer,0.5399253964424133,2.188887974283159,0.5238444209098816,1.004459664113718
+The Hitchhiker's Guide to the Galaxy,Agrajag,0.5030543804168701,0.041577053111906945,0.5538953542709351,0.8253280676268905
+The Hitchhiker's Guide to the Galaxy,Alice Beeblebrox,0.484935998916626,2.711765617333668,0.541314423084259,1.1895178198195084
+The Hitchhiker's Guide to the Galaxy,Allitnils,0.4514158368110657,0.7028346729424548,0.4638689458370209,1.1777096018292839
+The Hitchhiker's Guide to the Galaxy,Almighty Bob,0.460065096616745,1.8494015249277382,0.5477994680404663,2.8131422225838065
+The Hitchhiker's Guide to the Galaxy,Barman,0.5393004417419434,4.006915173714827,0.5009679794311523,1.4300801901079685
+The Hitchhiker's Guide to the Galaxy,Caveman,0.47303149104118347,1.0964563631165871,0.4764082133769989,3.0170284945414987
+The Hitchhiker's Guide to the Galaxy,Colin the Security Robot,0.5130603909492493,2.195416666697422,0.5294987559318542,2.83372619940594
+The Hitchhiker's Guide to the Galaxy,Constant Mown,0.4848194122314453,2.6657217316758315,0.45281487703323364,1.8655346074264398
+The Hitchhiker's Guide to the Galaxy,Dan Streetmentioner,0.4536094069480896,0.3031424793759491,0.5627984404563904,3.105531301877437
+The Hitchhiker's Guide to the Galaxy,Ameglian Major Cow,0.4219990670681,0.03332571162170555,0.5657427310943604,4.5520837618766254
+The Hitchhiker's Guide to the Galaxy,Eccentrica Gallumbits,0.5684916377067566,1.600421665398035,0.5986104011535645,5.373714248134763
+The Hitchhiker's Guide to the Galaxy,Effrafax of Wug,0.5077013373374939,2.7731462138051746,0.5334528088569641,2.335238629716953
+The Hitchhiker's Guide to the Galaxy,Elvis Presley,0.5296612977981567,1.5097927475210293,0.5141420960426331,3.4541161122097406
+The Hitchhiker's Guide to the Galaxy,Emperor of the Galaxy,0.5510367751121521,4.235283767584108,0.5142906308174133,5.053041068047337
+The Hitchhiker's Guide to the Galaxy,Enid Kapelsen,0.4745515286922455,1.8897915120627367,0.48542317748069763,1.4991200642594105
+The Hitchhiker's Guide to the Galaxy,Frankie and Benjy,0.5203325748443604,0.4796580016301606,0.5363255739212036,1.6535834987703122
+The Hitchhiker's Guide to the Galaxy,Gag Halfrunt,0.48898032307624817,0.09826209766436299,0.5009781122207642,1.4467308498254026
+The Hitchhiker's Guide to the Galaxy,Gail Andrews,0.45354676246643066,2.1617533355470595,0.4651680290699005,1.5026311790084164
+The Hitchhiker's Guide to the Galaxy,Pizpot Gargravarr,0.47502923011779785,0.8398734947705285,0.49327972531318665,1.082886502347771
+The Hitchhiker's Guide to the Galaxy,Garkbit,0.5128700733184814,1.946174109013354,0.5239087343215942,2.7189349347234626
+The Hitchhiker's Guide to the Galaxy,Genghis Khan,0.5043528079986572,1.1397913126863475,0.5355144143104553,3.3542247691558065
+The Hitchhiker's Guide to the Galaxy,God,0.47207966446876526,0.00047432513210303917,0.5213883519172668,2.9978883732699493
+The Hitchhiker's Guide to the Galaxy,Golgafrinchans,0.49987417459487915,0.0022159234741147347,0.5546774864196777,0.9447810135592937
+The Hitchhiker's Guide to the Galaxy,Agda,0.45730555057525635,1.421807429387134,0.5298096537590027,6.014122812135902
+The Hitchhiker's Guide to the Galaxy,Mella,0.45204105973243713,1.4347506010220392,0.5313006639480591,8.1670382369449
+The Hitchhiker's Guide to the Galaxy,Captain,0.5367289185523987,2.8051123068236277,0.5163926482200623,1.8205771236546997
+The Hitchhiker's Guide to the Galaxy,Great Circling Poets of Arium,0.5344204902648926,4.215717009037368,0.4994305968284607,2.7808154040086315
+The Hitchhiker's Guide to the Galaxy,Number One,0.4791897237300873,1.594780988308329,0.5081092715263367,1.2952635584734353
+The Hitchhiker's Guide to the Galaxy,Number Two,0.43616983294487,0.9727470646024056,0.4991006553173065,1.892146497758362
+The Hitchhiker's Guide to the Galaxy,Googleplex Starthinker,0.5453110933303833,6.22332899740104,0.48882734775543213,1.7581331973296717
+The Hitchhiker's Guide to the Galaxy,Great Green Arkleseizure,0.528576135635376,3.3641340015274275,0.5669288635253906,6.084616931515877
+The Hitchhiker's Guide to the Galaxy,Great Hyperlobic Omnicognate Neutron Wrangler,0.5309005975723267,1.657872944449094,0.5062478184700012,1.0521555069322066
+The Hitchhiker's Guide to the Galaxy,Grunthos the Flatulent,0.5367152690887451,2.4596168074797715,0.5372061729431152,2.8103312219034002
+The Hitchhiker's Guide to the Galaxy,Guide Mark II,0.531345784664154,0.9675061823276139,0.5462780594825745,4.615148925284156
+The Hitchhiker's Guide to the Galaxy,Hactar,0.43298080563545227,0.0638868818155731,0.5714038014411926,2.182541127424045
+The Hitchhiker's Guide to the Galaxy,Haggunenons,0.5025545358657837,0.4367676940461874,0.5594968795776367,1.4825687364523716
+The Hitchhiker's Guide to the Galaxy,Heimdall,0.5224384665489197,2.6783508235738216,0.5182178020477295,2.6073897142980638
+The Hitchhiker's Guide to the Galaxy,Hig Hurtenflurst,0.4822753369808197,2.544482997403323,0.5229754447937012,1.666027704785033
+The Hitchhiker's Guide to the Galaxy,Hillman Hunter,0.4723850190639496,0.34632852795025587,0.526846706867218,1.3370309332758832
+The Hitchhiker's Guide to the Galaxy,Hotblack Desiato,0.5021933913230896,3.3008269824014618,0.5471504330635071,4.551225292467061
+The Hitchhiker's Guide to the Galaxy,Humma Kavula,0.4854157567024231,1.8566239223449013,0.5276848077774048,5.1409588951076595
+The Hitchhiker's Guide to the Galaxy,Hurling Frootmig,0.4553157687187195,0.5980317027887777,0.496841698884964,2.4275447729382313
+The Hitchhiker's Guide to the Galaxy,Judiciary Pag,0.4658540189266205,2.2053176174830873,0.5554378628730774,1.6235163327564248
+The Hitchhiker's Guide to the Galaxy,Know-Nothing-Bozo,0.46053677797317505,1.4569908972698322,0.48593950271606445,1.0203099039770764
+The Hitchhiker's Guide to the Galaxy,Prostetnic Vogon Kwaltz,0.5228808522224426,1.413300164165216,0.5617323517799377,5.506559981534626
+The Hitchhiker's Guide to the Galaxy,Lady Cynthia Fitzmelton,0.49885913729667664,0.12586928404825376,0.4810832440853119,0.3714832006276615
+The Hitchhiker's Guide to the Galaxy,Lajestic Vantrashell of Lob,0.5244877934455872,1.6782123801521862,0.4863301217556,3.5138242186876836
+The Hitchhiker's Guide to the Galaxy,Lallafa,0.47532039880752563,0.7379222995063882,0.5090602040290833,1.4554464099777698
+The Hitchhiker's Guide to the Galaxy,Lig Lury Jr,0.44911476969718933,0.16754347284360904,0.4874661862850189,3.8898089760996233
+The Hitchhiker's Guide to the Galaxy,Lintilla,0.4331122636795044,0.005411669416613914,0.5091831088066101,0.25626518706538576
+The Hitchhiker's Guide to the Galaxy,Loonquawl,0.5614513754844666,1.0392012009479468,0.5413311123847961,1.8312584658984103
+The Hitchhiker's Guide to the Galaxy,Phouchg,0.5244225859642029,3.808853748013472,0.49268457293510437,1.0361474155167532
+The Hitchhiker's Guide to the Galaxy,The Lord,0.4792201817035675,1.354531018545949,0.5172358751296997,3.7359408086054313
+The Hitchhiker's Guide to the Galaxy,Lunkwill and Fook,0.44593310356140137,0.9297680416501386,0.5080600380897522,1.226444792424524
+The Hitchhiker's Guide to the Galaxy,Majikthise,0.5912110805511475,12.419892748137844,0.5154204964637756,1.6728761877985672
+The Hitchhiker's Guide to the Galaxy,Vroomfondel,0.5641388893127441,6.758901834977475,0.5593463778495789,9.166721456417331
+The Hitchhiker's Guide to the Galaxy,Max Quordlepleen,0.4794408977031708,2.500813108490748,0.5032950639724731,3.402245075624442
+The Hitchhiker's Guide to the Galaxy,Murray Bost Henson,0.5320996642112732,2.2800785875024605,0.5120303630828857,2.9196774427018526
+The Hitchhiker's Guide to the Galaxy,Old Man on the Pole,0.4875471293926239,4.7026631692738174,0.5063191056251526,6.556842986455579
+The Hitchhiker's Guide to the Galaxy,Old Thrashbarg,0.5388432741165161,4.580215516699263,0.5233867764472961,2.5792795717445203
+The Hitchhiker's Guide to the Galaxy,Old Woman in the Cave,0.46451830863952637,1.1709525801459921,0.5418444275856018,7.01581392016772
+The Hitchhiker's Guide to the Galaxy,Oolon Colluphid,0.5167138576507568,0.3045699755027859,0.5419318079948425,4.214868428260779
+The Hitchhiker's Guide to the Galaxy,Paul Neil Milne Johnstone,0.5655339360237122,1.9738671343872252,0.46876105666160583,2.2279106651680562
+The Hitchhiker's Guide to the Galaxy,Poodoo,0.452880322933197,1.4072647612067786,0.4860747456550598,1.3931414530091124
+The Hitchhiker's Guide to the Galaxy,Prak,0.44076988101005554,0.11800491483896147,0.514369547367096,0.4189564750491458
+The Hitchhiker's Guide to the Galaxy,Mr Prosser,0.5069112777709961,3.1382313033558216,0.5113053321838379,4.395576469021057
+The Hitchhiker's Guide to the Galaxy,Prostetnic Vogon Jeltz,0.5412886142730713,3.643808150265752,0.5502378940582275,5.653932292970884
+The Hitchhiker's Guide to the Galaxy,Questular Rontok,0.4964897036552429,1.9219703990808992,0.4972619116306305,4.186321044159038
+The Hitchhiker's Guide to the Galaxy,Random Dent,0.5012661218643188,1.1383587611249018,0.47456905245780945,0.15249740194949776
+The Hitchhiker's Guide to the Galaxy,Reg Nullify,0.4720154106616974,1.902039103801027,0.5031870007514954,2.4571186786862946
+The Hitchhiker's Guide to the Galaxy,Rob McKenna,0.48692765831947327,0.3086540700572138,0.45095646381378174,0.2733458965409725
+The Hitchhiker's Guide to the Galaxy,Roosta,0.5273421406745911,4.546129077375875,0.526216983795166,4.499983689897789
+The Hitchhiker's Guide to the Galaxy,Ruler of the Universe,0.49774467945098877,4.313830727905722,0.5375468134880066,9.833476668039365
+The Hitchhiker's Guide to the Galaxy,Russell,0.4884125888347626,1.8988536444481339,0.4713529944419861,3.520367733124405
diff --git a/ressources/data/normal_eval2.csv b/ressources/data/normal_eval2.csv
new file mode 100644
index 0000000000000000000000000000000000000000..edf3abfb4606a4b647c264ee764fe7e0b936bbb1
--- /dev/null
+++ b/ressources/data/normal_eval2.csv
@@ -0,0 +1,541 @@
+Book,Character,F1,h,wF1,wh
+The Lord of the Rings,Gimli,0.5107972621917725,0.13443801698858393,0.5312908291816711,0.0010502826036163104
+The Lord of the Rings,Elrond,0.48144325613975525,1.9066771373142473,0.4988417327404022,2.9819810241075775
+The Lord of the Rings,Arwen,0.5305920243263245,0.37755328562381246,0.5569050312042236,0.01984585606742649
+The Lord of the Rings,Smaug,0.4648820757865906,0.3524463914799617,0.4800652265548706,1.3515472953976442
+The Lord of the Rings,Frodo Baggins,0.4845083951950073,1.1586468251538447,0.4625139534473419,1.4225173697938494
+The Lord of the Rings,Samwise Gamgee,0.516346275806427,0.008635374302851169,0.5517829060554504,6.5870881136989915e-06
+The Lord of the Rings,Bilbo Baggins,0.48496222496032715,0.4667388614757454,0.45521271228790283,1.5087656836859384
+The Lord of the Rings,Gandalf,0.4683649241924286,1.8641340440344578,0.47944560647010803,2.943566129836775
+The Lord of the Rings,Ancalagon,0.5332438945770264,1.3377617161546485,0.6009135246276855,1.4144417320430205
+The Lord of the Rings,Shadowfax,0.5600701570510864,3.1191973573171596,0.5659462809562683,1.640608785919145
+The Lord of the Rings,Aragorn II,0.5159305930137634,0.8487244291573735,0.4953894317150116,2.043826256497426
+The Lord of the Rings,Balin,0.5085909962654114,0.11893969038926464,0.5512982606887817,0.0006193675269840093
+The Lord of the Rings,Bard,0.5050056576728821,0.026355821855447232,0.5294986367225647,3.8916732286303865e-05
+The Lord of the Rings,Beorn,0.5323838591575623,0.40355825841957293,0.5768317580223083,0.09970775452842542
+The Lord of the Rings,Boromir,0.5241677165031433,0.11814064196671167,0.5525522828102112,0.005484679592346904
+The Lord of the Rings,Celeborn,0.5215296149253845,0.016747108346838697,0.5615239143371582,2.142140399366893e-05
+The Lord of the Rings,Celebrimbor,0.5067253708839417,1.0575533301855333,0.5297908186912537,4.509701427063241
+The Lord of the Rings,Denethor II,0.5090440511703491,0.4427748040732883,0.5393776297569275,0.015071400709693204
+The Lord of the Rings,Éomer,0.5233268737792969,1.31307278010033,0.5731189846992493,0.012043907093584457
+The Lord of the Rings,Éowyn,0.5037795305252075,0.361386855873899,0.5493127107620239,0.0015152985022966877
+The Lord of the Rings,Faramir,0.48917776346206665,1.9933056139062686,0.49465763568878174,1.0009769697555702
+The Lord of the Rings,Galadriel,0.48361361026763916,1.6749693381935893,0.4992484152317047,3.2063685807969784
+The Lord of the Rings,Gollum,0.4888666868209839,9.018845864385908e-07,0.5417248010635376,2.7974571228520384e-07
+The Lord of the Rings,Gríma,0.5452985763549805,0.12507425417562654,0.5739209651947021,0.001444550575421236
+The Lord of the Rings,Halbarad,0.5402034521102905,1.5555165009375898,0.5443882346153259,0.9193734631361065
+The Lord of the Rings,Isildur,0.5210660099983215,0.017112526797695954,0.5709381103515625,0.0036278594316332944
+The Lord of the Rings,Fíli and Kíli,0.5192292332649231,0.27310838457265874,0.5307999849319458,3.5865667022029726e-06
+The Lord of the Rings,Legolas,0.44903311133384705,0.4938407484802042,0.4525555372238159,0.7749630835867346
+The Lord of the Rings,Lúthien,0.502149760723114,0.40962033556620503,0.5771726965904236,0.04542356400705872
+The Lord of the Rings,Maedhros,0.5247319340705872,0.2123965678051731,0.5603326559066772,0.0002969941491448213
+The Lord of the Rings,Melian,0.5110530853271484,0.9072737050244859,0.558221161365509,0.6104079360669264
+The Lord of the Rings,Meriadoc Brandybuck,0.5133464932441711,0.5250487917458937,0.550666332244873,0.03158295001470082
+The Lord of the Rings,Peregrin Took,0.5051109790802002,0.0387485409175478,0.5279164910316467,0.014012182356224005
+The Lord of the Rings,Radagast,0.506693422794342,0.021353734943244433,0.5546278357505798,1.0152234499330084e-06
+The Lord of the Rings,Saruman,0.45274069905281067,0.6746712656900681,0.482961505651474,3.480506052068393
+The Lord of the Rings,Sauron,0.4608953893184662,1.5314123718499901,0.4845940172672272,3.1077663702399985
+The Lord of the Rings,Shelob,0.5020247101783752,0.3466171382980677,0.5636788010597229,0.039742288595973736
+The Lord of the Rings,Théoden,0.5182898044586182,0.7738611620859397,0.5532655715942383,0.011785631773644209
+The Lord of the Rings,Thingol,0.4958104193210602,0.24362746256268125,0.5246247053146362,0.0049826164501031575
+The Lord of the Rings,Thranduil,0.5080153346061707,0.01579507538235909,0.5616160035133362,0.0010448700161254814
+The Lord of the Rings,Thrór,0.5086534023284912,0.3902141821370657,0.5392593145370483,0.01894576151077465
+The Lord of the Rings,Thorin II,0.5045325756072998,0.0007322934482175243,0.5158685445785522,6.198932369864242e-14
+The Lord of the Rings,Tom Bombadil,0.5555573105812073,0.00462060037652879,0.5867260694503784,0.00013693212326738497
+The Lord of the Rings,Treebeard,0.5185574889183044,1.2924189206223544,0.5584478378295898,0.04260455577819719
+The Lord of the Rings,Túrin,0.5115970969200134,0.004951190912082361,0.5016611218452454,5.695012396675375e-07
+The Lord of the Rings,Ungoliant,0.5175648331642151,0.5045119669209628,0.5658931136131287,0.1311805506644553
+The Lord of the Rings,Morgoth,0.49528229236602783,0.7517437502222779,0.5033573508262634,3.605734869013335
+The Lord of the Rings,Watcher in the Water,0.509591817855835,0.0042405603228484875,0.5722002983093262,0.005098600944878759
+The Lord of the Rings,Gil-galad,0.5118858814239502,0.006885749992454892,0.5715324282646179,0.0005677095052421668
+The Lord of the Rings,Círdan,0.5244083404541016,0.004101172412841658,0.5692238807678223,0.005268042048271648
+The Lord of the Rings,Khamûl,0.5237871408462524,2.43887942795052,0.5735595226287842,0.15456034586839554
+The Lord of the Rings,Thráin II,0.5144786238670349,0.207425169155333,0.5409236550331116,0.10152587198732502
+The Lord of the Rings,Glaurung,0.5107595324516296,1.0976330498120732,0.5491192936897278,0.28097354712994926
+The Lord of the Rings,Haldir,0.5345706343650818,0.9740524529530927,0.5514468550682068,0.1798993731779006
+The Lord of the Rings,Eärendil,0.5218246579170227,0.23744501395056095,0.5529740452766418,0.1858125783956537
+The Lord of the Rings,Glorfindel,0.5237315893173218,0.02478200149972964,0.5661776661872864,0.0005118026420829272
+The Lord of the Rings,Gothmog,0.4830499291419983,0.9579740012719514,0.5577483773231506,0.5803658301115063
+The Lord of the Rings,Beregond,0.4933965504169464,0.9584065938756756,0.5446121096611023,1.4934357387288448
+The Lord of the Rings,Berúthiel,0.5082423090934753,0.4980957631416123,0.48214003443717957,1.8618666625103751e-07
+The Lord of the Rings,Turgon,0.5152546167373657,0.7196076748658506,0.5317980051040649,0.015598328896658482
+The Lord of the Rings,Elendil,0.5218909978866577,0.013083401921036648,0.553233802318573,0.003957820935323402
+The Lord of the Rings,Beren,0.44145065546035767,1.6079673211409782,0.5460377931594849,0.009328295719264714
+The Lord of the Rings,Finrod,0.5122394561767578,0.17758238411961183,0.5545536875724792,0.1981583361706848
+The Lord of the Rings,Fingolfin,0.5022886395454407,0.02533580599084918,0.5144557356834412,2.3619318025066533e-06
+The Lord of the Rings,Fingon,0.50736403465271,0.23315054811117158,0.5217533707618713,0.0008768081132217099
+The Lord of the Rings,Gilraen,0.5016113519668579,2.895608857519192,0.553003191947937,1.46865577370515
+The Lord of the Rings,Morwen,0.5035339593887329,0.6349007574535617,0.5222731828689575,0.27339408889400757
+The Lord of the Rings,Idril,0.4942112863063812,1.314068414335417,0.5591335892677307,1.2186850059625787
+The Lord of the Rings,Glóin,0.5173260569572449,0.7753463717986114,0.5583321452140808,0.35802011091126157
+The Lord of the Rings,Ecthelion,0.4312361776828766,0.9034334532769908,0.4456978738307953,0.05281815463451589
+The Lord of the Rings,Forlong,0.5085343718528748,1.549760866172044,0.5502689480781555,0.8266883258751802
+The Lord of the Rings,Húrin,0.5138395428657532,0.20848051837140028,0.5366945266723633,0.04618263932211724
+The Lord of the Rings,Huor,0.5211780071258545,1.9514863175383215,0.5451857447624207,0.7122223494855264
+The Lord of the Rings,Lindir,0.5308339595794678,2.8669263177889683,0.5704033970832825,1.4879310709079154
+The Lord of the Rings,Erestor,0.5305899977684021,4.162589726390359,0.5322591662406921,1.593003584680578
+The Lord of the Rings,Elladan and Elrohir,0.5400537252426147,0.9585946716255522,0.608399510383606,0.07822826622225909
+The Lord of the Rings,Glóredhel,0.49147018790245056,0.8106219507015622,0.5282789468765259,1.5200741821498482
+The Lord of the Rings,Eöl,0.5101185441017151,0.6494136958348102,0.5270647406578064,0.2854043157767277
+The Lord of the Rings,Erendis,0.5087628960609436,0.8706285962587988,0.42865413427352905,0.0073167026745190435
+The Lord of the Rings,Fëanor,0.4521108567714691,0.48601597088876874,0.4460250437259674,0.6571302757098753
+The Lord of the Rings,Aredhel,0.48831963539123535,0.9713200343189435,0.549968421459198,0.13678508768404535
+The Lord of the Rings,Caranthir,0.5087292194366455,1.4316522621812555,0.5816361904144287,1.346527062486357
+The Lord of the Rings,Curufin,0.5269293189048767,0.6972882576829325,0.5494112968444824,0.17480176729290206
+The Lord of the Rings,Amras,0.5004374980926514,1.042931394413283,0.5468523502349854,1.3165392456723393
+The Lord of the Rings,Amrod,0.5072486996650696,0.6558058192188447,0.4968511462211609,0.4822242934610138
+The Lord of the Rings,Amlach,0.5124720931053162,0.9659121001635894,0.5379443764686584,1.7768019393816916
+The Lord of the Rings,Angrod,0.4995771050453186,0.9248858604226928,0.5527409911155701,2.049501854594676
+The Lord of the Rings,Aegnor,0.5125909447669983,1.498046685500856,0.4651370346546173,0.0296654542630147
+The Lord of the Rings,Handir,0.4844471216201782,1.0750647162331015,0.48563966155052185,0.8722875514599088
+The Lord of the Rings,Hareth,0.49066197872161865,0.6458685488562352,0.4939611554145813,2.514221027726332
+The Lord of the Rings,Hador,0.518173336982727,1.58841080246865,0.5241454839706421,1.0883444726317961
+The Lord of the Rings,Haleth,0.5136412382125854,0.8894122996936564,0.4729166328907013,0.03535011583276276
+The Lord of the Rings,Baragund,0.5189000368118286,0.4838161711203855,0.5419530272483826,1.2379986208804779
+The Lord of the Rings,Barahir,0.4928971230983734,2.00003374129367,0.5363517999649048,0.4027738909449855
+The Lord of the Rings,Baran,0.4921049177646637,0.5796325470572229,0.46089115738868713,1.4055261504592633
+The Lord of the Rings,Belemir,0.47782522439956665,0.4576622905168591,0.42291128635406494,1.2257453854580285
+The Lord of the Rings,Celegorm,0.5223082900047302,0.7356652172163988,0.560785710811615,0.7826825216451271
+The Lord of the Rings,Carcharoth,0.48646846413612366,0.6385166502613182,0.5634573101997375,0.36626985925996053
+The Lord of the Rings,Gelmir,0.5109872817993164,1.3328831640981806,0.47090843319892883,0.3053119583828561
+The Lord of the Rings,Galdor,0.5094061493873596,1.1159833070496714,0.5091733932495117,3.4446855583506504
+The Lord of the Rings,Gwindor,0.5087060928344727,0.8921264216760356,0.5551761388778687,0.42173065592396797
+The Lord of the Rings,Eilinel,0.47498664259910583,0.3264001740397163,0.43308714032173157,1.4391811008157722
+The Lord of the Rings,Finduilas,0.5043450593948364,0.632274220881976,0.5123217701911926,0.24130402060723413
+The Lord of the Rings,Rían,0.4961355924606323,0.9946956434845338,0.5180808901786804,0.8743378123412466
+The Lord of the Rings,Nienor,0.5087598562240601,0.6209307330328973,0.5361229181289673,0.25342925110983466
+The Lord of the Rings,Mablung,0.5178106427192688,0.5325523006676214,0.569983720779419,0.5836129963765792
+Harry Potter,Nymphadora Tonks,0.4435421824455261,0.6859964459287748,0.46977338194847107,0.5048507440186165
+Harry Potter,Dolores Umbridge,0.44084709882736206,0.45896869574809074,0.4294912815093994,3.0868992167502634
+Harry Potter,Fleur Delacour,0.46302181482315063,0.45587791584923554,0.4460435211658478,0.24171342684239935
+Harry Potter,Sirius Black,0.4392881393432617,0.4511392466785318,0.43665993213653564,0.6741745154140073
+Harry Potter,Remus Lupin,0.44049179553985596,0.4275229034952983,0.4447333812713623,2.4146510191986827
+Harry Potter,Minerva McGonagall,0.48225265741348267,0.21885526270943548,0.46760377287864685,4.567211100804414
+Harry Potter,Neville Longbottom,0.4250537157058716,0.40503104644677257,0.40079912543296814,0.703794524437064
+Harry Potter,Helga Hufflepuff,0.5153310298919678,0.9549677795947401,0.5492783188819885,0.11609126093426592
+Harry Potter,Albus Dumbledore,0.4280836284160614,0.5300415154093028,0.4623830318450928,2.0296755559231205
+Harry Potter,Cedric Diggory,0.44349366426467896,0.2648299538387257,0.48098424077033997,1.6465408930425656
+Harry Potter,Cho Chang,0.4231358468532562,0.3988075322288494,0.4674471914768219,0.6284094386050539
+Harry Potter,Rufus Scrimgeour,0.515264093875885,0.0010522797096094469,0.5608766674995422,3.6156858316372334e-07
+Harry Potter,Arabella Figg,0.49977296590805054,0.0006881625613978888,0.5660061836242676,0.0007005743895294081
+Harry Potter,Sybill Trelawney,0.45816344022750854,0.5482148829107473,0.46940577030181885,2.8798410942004162
+Harry Potter,Rita Skeeter,0.4587719142436981,0.9114597085491148,0.47384923696517944,1.0104308133364373
+Harry Potter,Padma Patil,0.4924223721027374,0.08119999840452988,0.5555651187896729,0.0015191952303639268
+Harry Potter,Myrtle Warren,0.48045504093170166,1.0701752161855695,0.4709806740283966,2.9196646606749446
+Harry Potter,Peter Pettigrew,0.439274400472641,0.5518819575752251,0.43477287888526917,2.667822673409213
+Harry Potter,Bellatrix Lestrange,0.46121734380722046,0.2534420421259237,0.45441338419914246,0.962492324603031
+Harry Potter,Alastor Moody,0.45167651772499084,0.5009578795219183,0.4389686584472656,0.5182164274500778
+Harry Potter,Gilderoy Lockhart,0.4285164475440979,0.5575312214243104,0.451913446187973,6.319755578667511
+Harry Potter,Newton Scamander,0.4526854157447815,0.5985131966469549,0.456913024187088,2.6860936758667258
+Harry Potter,Garrick Ollivander,0.503818154335022,0.002396181341466263,0.5423436760902405,6.208807353955185e-07
+Harry Potter,Pomona Sprout,0.45539116859436035,0.5991124690808854,0.47850966453552246,9.770096635335799
+Harry Potter,Cornelius Fudge,0.4691202938556671,0.7548587542510679,0.4327090084552765,2.140909251467067
+Harry Potter,Filius Flitwick,0.4815472960472107,1.630173243584934,0.49081820249557495,5.699280098700994
+Harry Potter,Dobby,0.44303780794143677,0.4778912061664301,0.4382060468196869,0.8355269958401482
+Harry Potter,Igor Karkaroff,0.43005916476249695,0.11855374966651813,0.42675143480300903,1.8359158127641428
+Harry Potter,Viktor Krum,0.4503336548805237,0.3972958215585611,0.48301851749420166,1.094180660764281
+Harry Potter,William Weasley,0.4686269164085388,0.45131348571162544,0.4364808201789856,0.6737239296386476
+Harry Potter,Parvati Patil,0.48635801672935486,4.846955889740885e-05,0.5227300524711609,7.186875188866861e-10
+Harry Potter,Dean Thomas,0.44226017594337463,0.3565506139410043,0.4590863287448883,1.938231684929948
+Harry Potter,Katie Bell,0.49190065264701843,0.00017692032392686345,0.5337795615196228,9.19940473134516e-10
+Harry Potter,Vincent Crabbe,0.49004191160202026,0.019456977760415054,0.5308857560157776,9.875276038060636e-07
+Harry Potter,Seamus Finnigan,0.43651920557022095,0.2414835028043452,0.4489688575267792,0.4189462516119065
+Harry Potter,Cormac McLaggen,0.5016866326332092,0.004568516479888395,0.5371431112289429,4.969169167102703e-05
+Harry Potter,Tom Riddle,0.4464183449745178,0.842250817484116,0.44793701171875,1.4625807262644352
+Harry Potter,Rubeus Hagrid,0.4621366262435913,0.6217401834300766,0.46903857588768005,2.956571016403709
+Harry Potter,Nagini,0.4437938332557678,0.27427114169787586,0.4489191472530365,0.4947935457785631
+Harry Potter,Ronald Weasley,0.46545127034187317,0.4721455504174071,0.4626748859882355,0.19015741303281533
+Harry Potter,Severus Snape,0.4444811940193176,0.6127870543993669,0.4277845025062561,0.9099762295329378
+Harry Potter,Bartemius Crouch Senior,0.44994276762008667,0.2890588740969484,0.44482386112213135,0.7820869080728803
+Harry Potter,Gabrielle Delacour,0.48260921239852905,0.4698235452780161,0.5222122669219971,0.22258158389690838
+Harry Potter,Gellert Grindelwald,0.4383646845817566,0.21742324995469517,0.44741809368133545,0.6002849490572053
+Harry Potter,Phineas Nigellus Black,0.45218130946159363,0.0005390007977480256,0.4390638470649719,6.186878413479388e-05
+Harry Potter,Ludovic Bagman,0.4904271066188812,0.010807523084789633,0.52522212266922,0.00014115728423393908
+Harry Potter,Quirinus Quirrell,0.44450414180755615,0.21893950148772454,0.44458237290382385,6.3021629019896235
+Harry Potter,Hermione Granger,0.45171305537223816,0.09333032988091433,0.4399486184120178,0.4492240611908405
+Harry Potter,Salazar Slytherin,0.4957578182220459,0.020773473902814445,0.5586127042770386,0.0020541716884802234
+Harry Potter,Aberforth Dumbledore,0.439961314201355,0.3804661496601506,0.4609413743019104,1.5795610558685258
+Harry Potter,Horace Slughorn,0.4715592563152313,0.3495036987996052,0.44217103719711304,0.7886036662666432
+Harry Potter,Walden Macnair,0.49835076928138733,0.07694563132644465,0.5421335101127625,0.00010629382129051809
+Harry Potter,Ariana Dumbledore,0.5067033767700195,0.010675544598360853,0.5837023854255676,2.2980763243528593e-05
+Harry Potter,Irma Pince,0.5231602191925049,0.0013375510480742622,0.5697351694107056,2.1363618379387013e-08
+Harry Potter,Nicolas Flamel,0.5289767980575562,0.05471130813623329,0.5383363962173462,0.000702023916279172
+Harry Potter,Amos Diggory,0.5232454538345337,0.045591212223854614,0.5543674826622009,1.5437027989862657e-05
+Harry Potter,Armando Dippet,0.4968133866786957,0.004074999992376017,0.5565256476402283,1.5875638654766323e-07
+Harry Potter,Lucius Malfoy,0.4317816197872162,0.5335285303747163,0.43922269344329834,0.5163010165856022
+Harry Potter,Hedwig,0.4928203523159027,0.005752510871408518,0.543062150478363,0.028617516962920145
+Harry Potter,Alice Longbottom,0.5431731939315796,0.7901745802159144,0.5435448884963989,0.08923437137931385
+Harry Potter,Pansy Parkinson,0.4770812690258026,7.574728996758107e-06,0.5257962346076965,3.058577800430968e-06
+Harry Potter,Kingsley Shacklebolt,0.5153641104698181,0.0021496749559594573,0.546042799949646,0.00032202513558434316
+Harry Potter,Aurora Sinistra,0.532209038734436,0.1915202459688135,0.5783342123031616,0.00012466565199010504
+Harry Potter,Septima Vector,0.5020323395729065,0.16285440558198566,0.5628880262374878,0.0005338140654824113
+Harry Potter,Nicholas de Mimsy-Porpington,0.4421016573905945,0.44839751137317907,0.44529038667678833,4.597857677043716
+Harry Potter,Draco Malfoy,0.45933642983436584,0.6527910367546879,0.4268490672111511,0.48572701934939094
+Harry Potter,Luna Lovegood,0.46457672119140625,0.6238259022389653,0.4613429009914398,1.5649159172310567
+Harry Potter,Ginevra Weasley,0.46553927659988403,0.44730148154555655,0.45598506927490234,0.34139308080476216
+Harry Potter,Poppy Pomfrey,0.49354714155197144,0.006568656269556662,0.5500788688659668,2.983172235607373e-09
+Harry Potter,Peeves,0.46058782935142517,1.233145913433488,0.45469802618026733,1.9797212847966739
+Harry Potter,Oliver Wood,0.512707531452179,0.012256833313550096,0.547406017780304,0.00022674668628380374
+Harry Potter,Godric Gryffindor,0.49263203144073486,0.16371885028054012,0.568062961101532,0.09509586287934976
+Harry Potter,Crookshanks,0.5288292169570923,0.21992403822504206,0.5692062377929688,0.006444083588860741
+Harry Potter,Rosmerta,0.5229362845420837,0.05011803963906756,0.5464492440223694,0.00012131785787824745
+Harry Potter,Angelina Johnson,0.4544048607349396,0.4981234406919799,0.46864229440689087,0.4752837056976108
+Harry Potter,Winky,0.5300799608230591,0.4985487122095663,0.5709412693977356,2.3353998210273716e-09
+Harry Potter,Rose Granger-Weasley,0.5170115232467651,0.03673488789629475,0.483674019575119,9.293178665924588e-05
+Harry Potter,Hugo Granger-Weasley,0.48420703411102295,1.3436759752415224,0.5218991041183472,0.9351466205920079
+Harry Potter,Petunia Dursley,0.47169366478919983,0.27644970188646806,0.4066319465637207,0.8251450452212465
+Harry Potter,Broderick Bode,0.49398112297058105,0.329247563996245,0.4099574387073517,6.715660929153429e-14
+Harry Potter,Augustus Rookwood,0.5091837048530579,0.635698482380658,0.5456185936927795,0.0487423070392276
+Harry Potter,Piers Polkiss,0.5051007866859436,0.8832067491982498,0.5188909769058228,1.1059280777187028
+Harry Potter,Fawkes,0.4881545305252075,0.0012893054242995817,0.5539714694023132,0.023576306291410897
+Harry Potter,Bartemius Crouch Junior,0.4792044460773468,0.3325170388966956,0.4619598388671875,0.6034569700781027
+Harry Potter,Molly Weasley,0.4311411678791046,0.508030470417897,0.41392049193382263,0.36118564456291663
+Harry Potter,Pius Thicknesse,0.4485996663570404,0.1960834275805505,0.512029767036438,6.7241387733858985e-06
+Harry Potter,Harry Potter,0.44439318776130676,0.08774943491120504,0.4571187496185303,0.47410911922067966
+Harry Potter,Aragog,0.4950214922428131,0.014602460809624802,0.5787279605865479,0.009291910747123561
+Harry Potter,Mosag,0.4920565187931061,0.8782917616700553,0.4586452543735504,0.1613183485781155
+Harry Potter,James Potter II,0.4318004846572876,0.0360627203947301,0.42947453260421753,0.07746482564960935
+Harry Potter,Arthur Weasley,0.45670226216316223,0.4251668865434073,0.41973280906677246,0.3568288856396605
+Harry Potter,Buckbeak,0.4945935606956482,0.10136327047692632,0.5605059266090393,0.019062546992555177
+Harry Potter,Ernest Prang,0.4916805922985077,0.9373748969752136,0.5365779995918274,0.47672090011207124
+Harry Potter,Amycus Carrow,0.5033026933670044,0.7280029986174057,0.5457427501678467,0.0002259872720320896
+Harry Potter,Hepzibah Smith,0.5040813088417053,0.014139892573018618,0.529179573059082,0.00169107452977919
+Harry Potter,Amelia Bones,0.4853380024433136,0.0026813490019191016,0.5207808613777161,6.355642097197657e-05
+Harry Potter,Anthony Goldstein,0.4943319261074066,0.1704634474134773,0.5273448824882507,0.028833739421960266
+Harry Potter,Corban Yaxley,0.5104935169219971,0.007254628891591217,0.5566244125366211,6.774941150943522e-05
+Harry Potter,Dilys Derwent,0.4831989109516144,0.7765530075643879,0.5175477862358093,0.5046744596562014
+Harry Potter,Hokey,0.5624920129776001,1.0192781336516665,0.44033166766166687,0.0007637035054289396
+Harry Potter,Penelope Clearwater,0.5008800029754639,0.14794512240935126,0.5388537645339966,0.034408135523120255
+Harry Potter,Selwyn,0.45378321409225464,0.37000917021196883,0.5304065346717834,0.264806229420868
+Harry Potter,Terry Boot,0.537269651889801,2.7182209576939864,0.5232139229774475,0.0006437601524779935
+Harry Potter,Zacharias Smith,0.48201674222946167,0.1741637705911051,0.5226630568504333,0.003674142606466274
+Harry Potter,Stanley Shunpike,0.5116391777992249,0.181094307668219,0.5294852256774902,0.005696741573304397
+Harry Potter,Ernest Macmillan,0.5520151257514954,0.0005296513114701285,0.5393288135528564,1.2384361177579228e-10
+Harry Potter,Eloise Midgen,0.5077885389328003,2.7310612733937654,0.47878018021583557,0.04019695546146333
+Harry Potter,Griphook,0.5197423696517944,0.2121497726817984,0.5715857148170471,0.006688551829028589
+Harry Potter,Alicia Spinnet,0.4855826497077942,0.02293066462942393,0.5529946684837341,1.947834401874541e-06
+Harry Potter,Ignotus Peverell,0.516290009021759,0.25397141316954513,0.5310029983520508,0.006411528662225803
+Harry Potter,Lee Jordan,0.46628209948539734,0.17443428352771834,0.47592130303382874,0.7629574823196934
+Harry Potter,Reginald Cattermole,0.49377742409706116,0.5067418689537313,0.5048711895942688,0.04439899363616947
+Harry Potter,Wilhelmina Grubbly-Plank,0.5114792585372925,0.2627456816056188,0.5711089372634888,0.13519002726235718
+Harry Potter,Rowena Ravenclaw,0.4968787431716919,0.19674952909766583,0.5475223064422607,0.015548044139341004
+Harry Potter,Herbert Beery,0.49589803814888,2.8443517176706856,0.4173962473869324,4.830428471126465e-05
+Harry Potter,Justin Finch-Fletchley,0.5097084641456604,0.003062946876082479,0.5399928092956543,2.086270966029308e-06
+Harry Potter,Elphias Doge,0.49515300989151,0.057937186284353306,0.5514315962791443,9.9745185061453e-05
+Harry Potter,Hannah Abbott,0.4619835317134857,0.00027472331944994885,0.5173898339271545,5.5516929050068714e-08
+Harry Potter,Marcus Flint,0.4845448136329651,0.1639240204812492,0.5477555394172668,0.05271337227701224
+Harry Potter,Millicent Bulstrode,0.5103342533111572,0.631463806450353,0.5328689217567444,0.02259667796253037
+Harry Potter,Marietta Edgecombe,0.5108231902122498,0.008964537626061198,0.5571213960647583,5.5660327967073295e-09
+Harry Potter,Evan Rosier,0.5070687532424927,0.6047670895687738,0.5272225737571716,0.946096669336646
+Harry Potter,Fat Friar,0.49053964018821716,0.06889725657833988,0.5633115768432617,0.0017602953084694779
+Harry Potter,Rolanda Hooch,0.47006678581237793,0.0014731959744198376,0.5546188950538635,7.939548695201769e-09
+Harry Potter,Antioch Peverell,0.5058832168579102,0.5252931342712709,0.5311525464057922,0.11465826547833487
+Harry Potter,Astoria Malfoy,0.4970783293247223,0.190809508483697,0.529772162437439,0.07472962995005278
+Harry Potter,Cadmus Peverell,0.5071195960044861,0.5474296398438426,0.5175254344940186,0.002919452880278532
+Harry Potter,Merope Riddle,0.49469834566116333,6.519697389556579e-05,0.5378767848014832,1.287292492079564e-08
+Harry Potter,Dexter Fortescue,0.4944321811199188,0.3049940554906587,0.5033755898475647,0.15965660552299948
+Harry Potter,Susan Bones,0.48266303539276123,0.00025030030206157444,0.5379840731620789,1.6962886640131738e-06
+Harry Potter,Silvanus Kettleburn,0.48836106061935425,0.04954424440078134,0.551296055316925,0.00048364165390258636
+Harry Potter,Antonin Dolohov,0.4867320656776428,0.05079084899966955,0.5726486444473267,0.04437852133566685
+Harry Potter,Frank Longbottom,0.5271263718605042,0.7464672395819767,0.5695174932479858,0.1059748523121532
+Harry Potter,Michael Corner,0.4988413453102112,0.058754452996483485,0.5366568565368652,3.6123695556515667e-06
+Harry Potter,Vernon Dursley,0.42006003856658936,0.37807521127231,0.40791621804237366,0.6485248124973402
+Harry Potter,Marlene McKinnon,0.4997681975364685,1.0778865364223542,0.5648558735847473,6.969568768398113
+Harry Potter,Dorcas Meadowes,0.5020419359207153,0.8510681106231619,0.5624010562896729,5.287947262297381
+Harry Potter,Emeric the Evil,0.5236355662345886,1.2274087848206603,0.49173834919929504,0.20504452048340394
+Harry Potter,Dennis Creevey,0.43204939365386963,0.00026934619134328904,0.5151597261428833,0.0031360967163570398
+Harry Potter,Mykew Gregorovitch,0.5075162649154663,0.14930345023335617,0.5679985284805298,0.0237588694113758
+Harry Potter,George Weasley,0.44044938683509827,0.41697020432828846,0.4440170228481293,0.7843570431009249
+Harry Potter,Fred Weasley,0.44706523418426514,0.5032112406830574,0.43204227089881897,0.48276660062839105
+Harry Potter,Gregory Goyle,0.5011573433876038,0.03125490999964649,0.5086613297462463,6.408402044599532e-07
+Harry Potter,Galatea Merrythought,0.5011895895004272,0.5909507568635429,0.5282485485076904,0.6287319449509017
+Harry Potter,Victoire Weasley,0.4720750153064728,0.4276157716772918,0.48534247279167175,0.06576483999709101
+Harry Potter,Fat Lady,0.4809730350971222,0.04667547112077928,0.561942458152771,0.00040296274186763745
+Harry Potter,Dominique Weasley,0.48991844058036804,0.6449057551469597,0.47528448700904846,0.5957920297400371
+Harry Potter,Beedle the Bard,0.5139830708503723,2.351525735391917,0.5576179623603821,7.8120300997627075
+Harry Potter,Ignatius Prewett,0.48718011379241943,0.4932023043441723,0.488484650850296,1.3301790440941101
+Harry Potter,Sirius Black I,0.46859583258628845,0.7059911402560716,0.48095497488975525,0.8610087759781347
+Harry Potter,Regulus Black I,0.47912079095840454,0.6593988606866316,0.4364158809185028,0.42981114080842925
+Harry Potter,Louis Weasley,0.4619789123535156,0.6541484749566165,0.45581889152526855,0.25788898403288235
+Harry Potter,Abraxas Malfoy,0.5116108059883118,2.722284294512817,0.528152585029602,0.4422563753049664
+Harry Potter,Sirius Black II,0.4327516555786133,0.8886807851402021,0.43735450506210327,1.8498079363900768
+Harry Potter,Irma Crabbe,0.4557207524776459,0.500078484182119,0.4326726794242859,0.41281717401334383
+Harry Potter,Porpentina Goldstein,0.45215776562690735,0.2964207283392433,0.4645043909549713,0.7958961882745091
+Harry Potter,Radolphus Lestrange,0.46985742449760437,1.1205566695908122,0.4993914067745209,1.2897678913866335
+The Hunger Games,Gale Hawthorne,0.42366454005241394,0.10300749056187275,0.4258776605129242,1.255525948885735
+The Hunger Games,Katniss Everdeen,0.4845563769340515,0.10998829378260558,0.48172080516815186,1.510473580635784
+The Hunger Games,Effie Trinket,0.5257339477539062,7.600320478722381e-05,0.5627817511558533,5.603280758866763e-09
+The Hunger Games,Coriolanus Snow,0.4579843580722809,0.9129836935607708,0.43327534198760986,1.7283589187069268
+The Hunger Games,Mrs. Everdeen,0.4923335909843445,0.7042889756336381,0.5165349841117859,0.03543207328886845
+The Hunger Games,Cinna,0.41759371757507324,0.21976451201320993,0.4101203382015228,0.86300689448745
+The Hunger Games,Primrose Everdeen,0.5308364033699036,0.0677685928736543,0.5819083452224731,1.9057133578728183e-09
+The Hunger Games,Peeta Mellark,0.4702422022819519,0.2513760184516798,0.4527166187763214,1.5649296585705363
+The Hunger Games,Haymitch Abernathy,0.47196638584136963,0.2746674204021431,0.4384087026119232,1.578231987325297
+The Hunger Games,Alma Coin,0.48868462443351746,0.468572036166586,0.5130079388618469,2.4837320395148105e-05
+The Hunger Games,Casca Highbottom,0.49404099583625793,0.058793688694986704,0.4224543273448944,1.39184211373001e-09
+The Hunger Games,Livia Cardew,0.48630741238594055,1.0567839430066268,0.43594691157341003,0.00024317957677372738
+The Hunger Games,Palmyra Monty,0.48756036162376404,1.3511176525859887,0.5095211863517761,6.760745934657895
+The Hunger Games,Persephone Price,0.4980650544166565,1.0352154585984468,0.42776963114738464,0.00012282943440178562
+The Hunger Games,Festus Creed,0.5137855410575867,0.4488251552008332,0.43041476607322693,1.1489411666581085e-07
+The Hunger Games,Vipsania Sickle,0.4687986373901367,0.6673281913400538,0.43177270889282227,0.009349649124944152
+The Hunger Games,Hilarius Heavensbee,0.4951103627681732,0.7161396962156683,0.5291473865509033,1.248197113966103
+The Hunger Games,Domitia Whimsiwick,0.5107499361038208,1.2525096842108003,0.45315176248550415,0.011877676794797221
+The Hunger Games,Clemensia Dovecote,0.4968879520893097,0.017061128839792182,0.523499071598053,0.00010082220307434685
+The Hunger Games,Felix Ravinstill,0.5016847252845764,2.291929646798982,0.44192275404930115,0.012210427503161486
+The Hunger Games,Lysistrata Vickers,0.5213034749031067,0.39224862090012536,0.4561924338340759,3.3421247235283436e-10
+The Hunger Games,Lucretius Flickerman,0.49776577949523926,0.16294159618484738,0.5035482048988342,0.013322725307153543
+The Hunger Games,Reaper Ash,0.495331734418869,0.29435981988400833,0.4755721688270569,4.42166229972471e-06
+The Hunger Games,Mizzen,0.52642822265625,0.06473412977502771,0.45696794986724854,8.724480654851951e-09
+The Hunger Games,Coral,0.5041736960411072,0.03641299198373558,0.5333076119422913,0.0004916603460607651
+The Hunger Games,Treech,0.4814959466457367,0.12059400275848751,0.5275515913963318,0.020602210904005012
+The Hunger Games,Dill,0.4879874885082245,0.9374200025446748,0.4474625587463379,0.000871508592169862
+The Hunger Games,Mr. Everdeen,0.48485204577445984,0.5731186643948403,0.551663875579834,0.2182264292785018
+The Hunger Games,Volumnia Gaul,0.4893183410167694,0.021773046974852356,0.46757641434669495,1.959404559271376e-11
+Dune,Jamis,0.49763989448547363,0.4601588857393504,0.5307971239089966,0.06207339984855977
+Dune,Glossu Rabban Harkonnen,0.5234180092811584,1.040041833978686,0.5540176033973694,1.0121267484968228
+Dune,Hwi Noree,0.5152681469917297,1.0109310615941849,0.5379900336265564,0.1518654640025657
+Dune,Kailea Vernius,0.5006990432739258,1.2819557902143066,0.5422921180725098,0.5114421845178385
+Dune,Helena Atreides,0.4995802938938141,0.6356990969259351,0.54463130235672,0.2251058415891642
+Dune,Piter de Vries,0.5492623448371887,0.7538928578703038,0.5279063582420349,0.18567996521736058
+Dune,Ghanima Atreides,0.4968699812889099,0.8369040135915626,0.5,0.01904128896529533
+Dune,Farad'n Corrino,0.5331212282180786,1.39141791221437,0.5365118384361267,0.8225163626763393
+Dune,Fafnir Corrino,0.4420830011367798,0.5381058274066916,0.5618007779121399,1.7667567655319762
+Dune,Dominic Vernius,0.4946116507053375,0.43897956343990446,0.5052716732025146,0.10235119719954673
+Dune,Faykan Butler,0.45419275760650635,0.024926538000089177,0.45998162031173706,5.552317787712694e-05
+Dune,Thufir Hawat,0.5405481457710266,0.892894372436974,0.5702359080314636,0.022892264909911992
+Dune,Kwisatz Haderach,0.5288949608802795,0.6063077049825519,0.5786898732185364,0.0036976193062390806
+Dune,Duncan Idaho,0.4306022822856903,0.7432827616520465,0.45693251490592957,0.9857971180461219
+Dune,Paul Atreides,0.475263386964798,1.0560189280223136,0.46890854835510254,0.6532529036219028
+Dune,Wellington Yueh,0.5484734773635864,1.4385439187818103,0.5203583240509033,0.0994703658148618
+Dune,Miles Teg,0.557686984539032,0.6657775249077086,0.5225208401679993,0.039195871421940344
+Dune,Vladimir Harkonnen,0.5316377282142639,0.10214860371075954,0.5116723775863647,0.02433002084809396
+Dune,Darwi Odrade,0.536483108997345,0.008337277409029247,0.551603376865387,8.973373912887733e-05
+Dune,Wensicia Corrino,0.5032896399497986,0.5391336026435865,0.5432509183883667,0.49047363401336763
+Dune,Irulan Corrino,0.5143793225288391,0.3348602584278514,0.5152375102043152,0.009471606763026766
+Dune,Gurney Halleck,0.5444483160972595,0.5339192887295017,0.5520815253257751,0.01894974708965885
+Dune,Shaddam Corrino IV,0.4909803569316864,0.4651901071166394,0.484107106924057,0.2707872922110672
+Dune,Leto Atreides II,0.435349702835083,0.35848396496903695,0.4671371281147003,1.162239077604367
+Dune,Lucilla,0.5099300146102905,1.401826511542328,0.5361635088920593,0.41528920263869196
+Dune,Margot Fenring,0.5196251273155212,1.287933709101413,0.587001383304596,0.42810665262307374
+Dune,Murbella,0.5210418105125427,0.46735401137037125,0.5690101981163025,0.023731765380587813
+Dune,Octa Butler,0.48807811737060547,0.6217094571902593,0.4583471715450287,0.11959544378169995
+Dune,Mohandas Suk,0.49800941348075867,0.777807305732695,0.45198869705200195,0.033598502310195705
+Dune,Pardot Kynes,0.5138195753097534,0.3631800161713162,0.54291170835495,0.11880064880699287
+Dune,Raquella Berto-Anirul,0.4950067400932312,0.6736190323678809,0.5390993356704712,0.2187400306732795
+Dune,Norma Cenva,0.4278644323348999,0.34530067188127167,0.5005387663841248,1.3120403501803808
+Dune,Norma Cevna,0.504818320274353,0.9409330621425289,0.528740406036377,0.6579051343177426
+Dune,Gaius Helen Mohiam,0.5053005814552307,0.5883202830516998,0.5277078747749329,0.486373662513295
+Dune,Vorian Atreides,0.4654883146286011,0.4810919510127356,0.46167615056037903,0.6263714752302175
+Dune,Tlaloc,0.5012189149856567,0.7486960201354296,0.4670414626598358,0.07078680570844502
+Dune,Hayt,0.5157616138458252,1.1372734117655994,0.5824491381645203,3.2943028197495816
+Dune,Rhombur Vernius,0.47085869312286377,0.17106364797786422,0.5153435468673706,0.33119513789079225
+Dune,Scytale,0.5065829157829285,0.9726858826492011,0.533208966255188,0.022154572232380525
+Dune,Serena Butler,0.5059524774551392,0.13681074269120966,0.5279148817062378,6.365008228406361e-05
+Dune,Victor Atreides,0.5053738355636597,0.8780493156250442,0.5219838619232178,0.6441260360836389
+Dune,Shando Vernius,0.4806986451148987,0.7962298181287515,0.5127638578414917,0.5847123792064839
+Dune,Xavier Harkonnen,0.5392778515815735,0.6381818425165202,0.5134580135345459,0.001458326007678648
+Dune,Zufa Cenva,0.49050623178482056,0.45877982334525585,0.5150262713432312,0.04184900948173237
+Dune,Schwangyu,0.5012314915657043,1.6685941357419745,0.5462884306907654,2.5507629847737583
+Dune,Siona Atreides,0.525655210018158,0.760034515851974,0.5492343306541443,0.11378823488694567
+Dune,Tylwyth Waff,0.509412407875061,1.1437715938563466,0.451562762260437,0.05898334373052907
+Dune,Anirul Corrino,0.477915495634079,0.5785452349450044,0.5409173369407654,2.184465197505393
+Dune,Sheeana Brugh,0.5043639540672302,0.5192340812650102,0.5130999088287354,0.0895398934958691
+Dune,Stilgar,0.497808039188385,0.5928535300523808,0.5342109203338623,0.09811989177677183
+Dune,Hasimir Fenring,0.5153005123138428,1.5385246976002915,0.5507274270057678,1.985205607864301
+Dune,Leto Atreides I,0.48654621839523315,0.4045024875718957,0.49626630544662476,0.02422745562236205
+Dune,Minotauros Atreides,0.49368005990982056,1.0926740097497365,0.5304136872291565,0.16838277856519127
+Dune,Paulus Atreides,0.5223171710968018,0.5989475974585898,0.5316929817199707,0.503055581073807
+Dune,Abulurd Harkonnen,0.4777737557888031,0.4430687366664146,0.5658541917800903,1.8616959975219078
+Dune,Alma Mavis Taraza,0.5058012008666992,1.3960892703791923,0.46921518445014954,0.0009345553339762017
+Dune,Jessica Atreides,0.4802019000053406,0.4161399961091878,0.4718683063983917,0.09304195185544097
+Dune,Chani Kynes,0.519978404045105,0.6258408328432263,0.507990837097168,0.180328439611831
+Dune,Feyd-Rautha Harkonnen,0.5162959694862366,1.1772004111068048,0.4807954728603363,0.3885420702506556
+Dune,Alia Atreides,0.4822726249694824,0.5359149302021396,0.4800700843334198,0.39775995186399943
+Dune,Liet Kynes,0.5239098072052002,1.0709727789405448,0.5483620166778564,0.35537517762208515
+Dune,Moneo Atreides,0.4783799648284912,0.4628469607843391,0.5353717803955078,0.010203245311976824
+Dune,Tio Holtzman,0.5274263620376587,0.515163478282085,0.5471019148826599,0.023445820625701094
+Dune,Tyros Reffa,0.4748443067073822,0.6859880148079427,0.5335310101509094,0.5082867544066283
+Dune,Harah,0.4790497124195099,0.7169978842049008,0.5105242133140564,0.0005540146539405871
+Dune,Elrood Corrino IX,0.45676568150520325,0.47899643679439025,0.525293231010437,3.7146401915141745
+Dune,Jehanne Butler,0.4977191984653473,1.160610281011882,0.5291381478309631,0.3361658627024659
+Dune,Ramallo,0.4951575994491577,0.9747248480575009,0.5169386267662048,0.7338078391350811
+Dune,Lady Jessica,0.527129590511322,1.0146471545741353,0.4561772346496582,0.07746158410619693
+Dune,Leto I Atreides,0.4957177937030792,0.07135384679693386,0.5478718876838684,0.0030558619291654746
+Dune,Glossu Rabban,0.5158686637878418,0.9112838441922432,0.5629866123199463,0.508575296527618
+Dune,Piter De Vries,0.5379225611686707,0.5988211129863868,0.526581346988678,0.9056059650963464
+Dune,Mapes,0.46134498715400696,0.4332185225362724,0.542668879032135,0.9897316516413973
+Dune,Liet-Kynes,0.5251116156578064,0.7487577307844114,0.544280469417572,1.1512298258502456
+Dune,Otheym,0.4998379051685333,0.9750208906999622,0.5489936470985413,1.6242758905300028
+Dune,Korba,0.5202980637550354,0.9192963856136979,0.5151761174201965,1.1408700423282307
+Dune,Aramsham,0.4641391634941101,0.4544422923442146,0.48457738757133484,0.8127568286147788
+Dune,Iakin Nefud,0.4791484773159027,0.2640108801746009,0.5199869871139526,0.9827098352469199
+Dune,Edric,0.507510781288147,0.7897315362747146,0.5188983678817749,0.3442989693934002
+Dune,Bijaz,0.5071148872375488,0.6462591931943981,0.5156342387199402,0.05048940659611583
+Dune,Lichna,0.4484536945819855,0.32097504172457736,0.5563443303108215,1.0913543433308168
+Dune,Farok,0.4914568066596985,0.2526507789809973,0.513570249080658,1.2662744929738627
+Dune,Tyekanik,0.48693060874938965,0.5850379611842363,0.5662058591842651,3.932007099913652
+Dune,Ziarenko Javid,0.48177507519721985,1.2164039946239296,0.41237857937812805,0.003792379896148076
+Dune,Tertius Eileen Anteac,0.5171214938163757,0.9599716945215118,0.5144374966621399,0.2580117834826692
+Dune,Nayla,0.47869688272476196,0.6588261872586595,0.5447108149528503,2.623758321475784
+Dune,Quintinius Violet Chenoeh,0.48659810423851013,0.05915107731458808,0.5501734614372253,0.00018306463335155082
+Dune,Marcus Claire Luyseyal,0.48818811774253845,1.7373048076086937,0.5107654929161072,2.478274282387815
+Dune,Bellonda,0.5110083222389221,1.4250345742292878,0.5434740781784058,1.3193854440253319
+Dune,Alef Burzmali,0.4665161967277527,0.982452391156567,0.5583000183105469,2.8614034158046753
+Dune,Hedley Tuek,0.4864819049835205,0.7164396077823254,0.5281742215156555,1.6249834470605626
+Dune,Marty,0.4792817533016205,0.7280740326746394,0.49935731291770935,0.15888637586410678
+Dune,Daniel,0.5189328193664551,1.777215148070226,0.5223586559295654,0.8866722560686856
+Dune,Dama,0.47212088108062744,0.6013867057595473,0.5488190054893494,1.4037338737375893
+Dune,Logno,0.5146032571792603,1.9106249426370374,0.5130771398544312,0.2751950568614628
+Twilight,Bella Swan,0.48326900601387024,0.7247361055412566,0.43254128098487854,0.7365166028075251
+Twilight,Edward Cullen,0.4666794240474701,0.5013275577784613,0.44172605872154236,0.7004989690636835
+Twilight,Jacob Black,0.45409366488456726,0.46251454591712143,0.4554227590560913,0.9136896879693293
+Twilight,Carlisle Cullen,0.48934441804885864,0.5470750454476965,0.4620271921157837,0.8207067943634325
+Twilight,Esme Cullen,0.45789098739624023,0.5591127561137629,0.4658684730529785,1.4789642675409154
+Twilight,Alice Cullen,0.45514026284217834,0.4886141848937854,0.43589478731155396,0.7026150625583587
+Twilight,Emmett Cullen,0.46178871393203735,0.5848577373289497,0.49181410670280457,1.229882472097879
+Twilight,Rosalie Hale,0.4466276168823242,0.18814152265382128,0.4639298915863037,0.5961943617331009
+Twilight,Jasper Hale,0.4543110728263855,0.6303001184999871,0.46010643243789673,1.2542932210216382
+Twilight,Renesmee Cullen,0.45336928963661194,0.2003957739847295,0.449503093957901,0.6336242809830143
+Twilight,Aro,0.46222934126853943,0.32037646275100573,0.4565318822860718,1.161789809079368
+Twilight,Caius,0.5130079388618469,0.0324452166180314,0.5396819114685059,1.3461209166752e-05
+Twilight,Marcus,0.4986763596534729,0.09553676269226563,0.5225239396095276,0.0002537160997281344
+Twilight,Jane,0.5112879276275635,0.029926092488611125,0.5439932942390442,2.910038324003551e-06
+Twilight,Alec,0.4980205297470093,5.69776817895873e-05,0.5346221327781677,0.00019588846721213348
+Twilight,Demetri,0.5160878300666809,0.03620021807630833,0.5621719360351562,3.9644251217159456e-05
+Twilight,Felix,0.5224718451499939,0.06938209016410012,0.5610885620117188,0.000580389826992543
+Twilight,Heidi,0.4885730743408203,0.3045840415013427,0.43669769167900085,5.880942816393087e-09
+Twilight,Santiago,0.48533740639686584,0.5795768905286605,0.4533033072948456,0.0020581764638911274
+Twilight,James,0.4843384027481079,0.00019328727710063448,0.5438632369041443,7.270975490567213e-08
+Twilight,Victoria,0.46946388483047485,0.2857169373512564,0.44366422295570374,0.6672516060492069
+Twilight,Laurent,0.4904041588306427,0.09040309746137612,0.526269257068634,0.0005237260678788038
+Twilight,Riley Biers,0.49015605449676514,0.05270976324361161,0.5125524401664734,0.00016484236058231447
+Twilight,Bree Tanner,0.48958608508110046,0.0027619723448355513,0.5328367352485657,4.932456015088068e-07
+Twilight,Mexican coven,0.4761538505554199,0.617374634753648,0.5001615285873413,0.27962887487339805
+Twilight,Zafrina,0.4987817406654358,0.20252330825793444,0.5411728620529175,0.0028857634549945294
+Twilight,Senna,0.5137305855751038,0.7764955341732236,0.4481615424156189,0.0017553523032034879
+Twilight,Kachiri,0.511444628238678,1.4315984983905041,0.4789679944515228,0.10024542529216038
+Twilight,Peter,0.4881868064403534,0.005513947376631325,0.5223390460014343,3.571632622594117e-07
+Twilight,Charlotte,0.475521981716156,0.2087086186847072,0.5121451616287231,0.003702660980483653
+Twilight,Mary,0.4860509932041168,0.7267298526381508,0.4364092946052551,0.007932224446656456
+Twilight,Randall,0.5105313062667847,0.7926058356692781,0.5362573862075806,0.6021940269499707
+Twilight,Eleazar,0.5092800259590149,0.07703313822663524,0.5318141579627991,0.004268626934124416
+Twilight,Carmen,0.4952872097492218,0.0745045757995176,0.4914722442626953,0.0001590148750256677
+Twilight,Tanya,0.49481457471847534,0.027975104298779273,0.5204833745956421,8.002467585252991e-06
+Twilight,Kate,0.5082175731658936,0.009290900796765493,0.5042892098426819,3.1359330333416675e-06
+Twilight,Garrett,0.4824860095977783,0.1012772571840393,0.5020184516906738,0.017061872821195364
+Twilight,Sasha,0.5193257927894592,1.3981505767285414,0.49528419971466064,0.039143050060378305
+Twilight,Vasilii,0.4976499378681183,1.2087148512733323,0.524995744228363,0.7252319602120292
+Twilight,Irina,0.470138281583786,0.0024579253265756343,0.5045228004455566,1.5320277844471137e-05
+Twilight,Tia,0.4881680905818939,0.12122289337258306,0.5040939450263977,6.961811002554015e-05
+Twilight,Amun,0.507752001285553,0.053266414358660426,0.4271788001060486,2.712021848055028e-15
+Twilight,Benjamin,0.5235551595687866,0.2703467370796133,0.5271337032318115,1.699205690109834e-05
+Twilight,Kebi,0.5147552490234375,0.37210882399345485,0.4482637941837311,6.781821117860223e-06
+Twilight,Nahuel,0.48061612248420715,0.003023775940088949,0.4974486529827118,4.074022142901286e-06
+Twilight,Huilen,0.4822165071964264,0.09705484856845034,0.49867066740989685,0.009951900370045405
+Twilight,Sam Uley,0.4465399384498596,0.15808711108635265,0.46522754430770874,0.715707424161233
+Twilight,Quil Ateara V,0.5131765604019165,0.5332838435645686,0.5482388138771057,0.004660270887473742
+Twilight,Embry Call,0.47361963987350464,0.6357518729027831,0.5026254653930664,0.0012737631569219316
+Twilight,Paul Lahote,0.4901941120624542,0.005323837735816981,0.5420800447463989,0.00032426603144372927
+Twilight,Jared Cameron,0.48764532804489136,0.20275525682606105,0.5355255603790283,0.005256175872848222
+Twilight,Leah Clearwater,0.5022733211517334,0.004117591469258947,0.5302123427391052,4.384355372214623e-07
+Twilight,Seth Clearwater,0.49286770820617676,0.00034100763530345405,0.5481706857681274,1.3472232653530088e-08
+Twilight,Collin Littlesea,0.4267337918281555,0.19592904911513845,0.42828840017318726,0.2757751569891288
+Twilight,Brady Fuller,0.414991170167923,0.2777492253670694,0.3972945213317871,0.22229731547470863
+Twilight,Ephraim Black,0.4526848793029785,0.5033109066451074,0.5182806849479675,0.0905480977732796
+Twilight,Charlie Swan,0.42138567566871643,0.4498903362913304,0.4323393404483795,1.666027704785033
+Twilight,Harry Clearwater,0.5209450721740723,2.484474152342066,0.5603581666946411,0.29540960213373585
+Twilight,Billy Black,0.5037399530410767,0.001175325480440144,0.5305846333503723,3.265647245663343e-08
+Twilight,Tyler Crowley,0.5106507539749146,0.3095111610422963,0.5545148253440857,0.062295437912479344
+Twilight,Lauren Mallory,0.4392404556274414,0.4852898117353846,0.550056517124176,0.09533848961212787
+Twilight,Mike Newton,0.5011588335037231,0.141374825432708,0.5439550280570984,0.0013103557818105703
+Twilight,Jessica Stanley,0.45442330837249756,0.06622942713126397,0.5578256249427795,9.421336264408825e-05
+Twilight,Angela Weber,0.5035080909729004,0.021168630805939507,0.5863727331161499,0.00019963637726144005
+Twilight,Eric Yorkie,0.500085711479187,0.7050694621904712,0.5122127532958984,0.041648509826170396
+Twilight,Emily Young,0.4844948351383209,0.024171520341315887,0.5346267819404602,0.007170460737384733
+Twilight,Sue Clearwater,0.5133580565452576,0.4495958995794494,0.5527017712593079,0.007788496248674787
+Twilight,Quil Ateara III,0.5086703896522522,1.3297878312640377,0.5293914675712585,0.6705967737576495
+Twilight,Rachel Black,0.45956912636756897,0.2576749869769676,0.5207951068878174,0.01904188307577297
+Twilight,Rebecca Black,0.4915410280227661,0.918700707863489,0.4832848310470581,0.2138217510688916
+Twilight,J. Jenks,0.5021610856056213,1.1749364426276203,0.5415865182876587,0.0636886270483935
+The Hitchhiker's Guide to the Galaxy,Zaphod Beeblebrox,0.5275055766105652,2.5190144602293043,0.5366738438606262,1.3347223191943813
+The Hitchhiker's Guide to the Galaxy,Arthur Dent,0.5104024410247803,0.8366529425578266,0.5278688669204712,0.082972603286987
+The Hitchhiker's Guide to the Galaxy,Marvin,0.5021483302116394,0.177914968284265,0.5272244215011597,0.08662219902750602
+The Hitchhiker's Guide to the Galaxy,Trillian,0.5225361585617065,0.996532849187981,0.5575310587882996,0.0165348861052277
+The Hitchhiker's Guide to the Galaxy,Ford Prefect,0.5028705596923828,0.04878598763319378,0.5226372480392456,8.812170712380995e-05
+The Hitchhiker's Guide to the Galaxy,Fenchurch,0.49882397055625916,1.7435050866733504,0.5471852421760559,2.912782449467957
+The Hitchhiker's Guide to the Galaxy,Deep Thought,0.5161760449409485,3.2984400268379335,0.549379825592041,4.211763750328522
+The Hitchhiker's Guide to the Galaxy,Eddie the Computer,0.5143842101097107,0.7717861588622849,0.5626307725906372,0.6785563358824518
+The Hitchhiker's Guide to the Galaxy,Agrajag,0.44475430250167847,0.5815409131055337,0.5186874866485596,0.4742841304472363
+The Hitchhiker's Guide to the Galaxy,Alice Beeblebrox,0.49574699997901917,0.4318234513995117,0.5307592153549194,1.3733599282782643
+The Hitchhiker's Guide to the Galaxy,Allitnils,0.4362064599990845,0.3346937327370324,0.4490273594856262,1.3148000331605807
+The Hitchhiker's Guide to the Galaxy,Almighty Bob,0.4905094504356384,0.5909423247352042,0.5002472996711731,1.8500763634272333
+The Hitchhiker's Guide to the Galaxy,Barman,0.4852215051651001,0.892994337150235,0.53641277551651,1.711163363707738
+The Hitchhiker's Guide to the Galaxy,Caveman,0.44932815432548523,0.8532140394068194,0.48430880904197693,1.553444973821121
+The Hitchhiker's Guide to the Galaxy,Colin the Security Robot,0.49993428587913513,0.976077601788665,0.5409278273582458,1.6172071174414118
+The Hitchhiker's Guide to the Galaxy,Constant Mown,0.4446617364883423,0.3350513854588815,0.5357279777526855,1.584814100444835
+The Hitchhiker's Guide to the Galaxy,Dan Streetmentioner,0.498556911945343,2.340850178909282,0.5353698134422302,2.7769580586537597
+The Hitchhiker's Guide to the Galaxy,Ameglian Major Cow,0.4171024262905121,2.9120419274441147,0.5268067121505737,0.45047551339436726
+The Hitchhiker's Guide to the Galaxy,Eccentrica Gallumbits,0.48063889145851135,0.7287256191933535,0.5445906519889832,3.1585595168792167
+The Hitchhiker's Guide to the Galaxy,Effrafax of Wug,0.36155328154563904,0.5747696922517948,0.5487123727798462,1.4568019978263285
+The Hitchhiker's Guide to the Galaxy,Elvis Presley,0.41191357374191284,0.3797633349632493,0.4842294752597809,0.9820755274381016
+The Hitchhiker's Guide to the Galaxy,Emperor of the Galaxy,0.3234719932079315,0.31469537043386514,0.491729736328125,3.674797705066148
+The Hitchhiker's Guide to the Galaxy,Enid Kapelsen,0.32918861508369446,0.4689406271306049,0.5459449291229248,1.4302111081764555
+The Hitchhiker's Guide to the Galaxy,Frankie and Benjy,0.49847081303596497,2.5810864289806434,0.529952347278595,0.4140006520024565
+The Hitchhiker's Guide to the Galaxy,Gag Halfrunt,0.49259117245674133,0.4585229949614054,0.5219084620475769,2.089596555298776
+The Hitchhiker's Guide to the Galaxy,Gail Andrews,0.39950090646743774,0.2916095789617625,0.5061356425285339,2.9196618626207154
+The Hitchhiker's Guide to the Galaxy,Pizpot Gargravarr,0.3139541745185852,0.36225277548392054,0.5040284395217896,1.7450559279627826
+The Hitchhiker's Guide to the Galaxy,Garkbit,0.4613436758518219,0.31911230910253396,0.570511519908905,1.989116101173145
+The Hitchhiker's Guide to the Galaxy,Genghis Khan,0.42454302310943604,0.5671710824059766,0.47117817401885986,0.9350327254876517
+The Hitchhiker's Guide to the Galaxy,God,0.4983585476875305,2.8091519963663796,0.5157172679901123,3.0529506712113665
+The Hitchhiker's Guide to the Galaxy,Golgafrinchans,0.4871600866317749,0.8266789267927431,0.5505149364471436,0.5544471130203781
+The Hitchhiker's Guide to the Galaxy,Agda,0.5112017393112183,0.009029096827134648,0.5287888646125793,2.0108659815402974
+The Hitchhiker's Guide to the Galaxy,Mella,0.481859028339386,0.35364857791186266,0.5125448703765869,3.864342768834259
+The Hitchhiker's Guide to the Galaxy,Captain,0.42733311653137207,0.5894758492741129,0.514319658279419,1.5084252457658365
+The Hitchhiker's Guide to the Galaxy,Great Circling Poets of Arium,0.506938099861145,3.108880398631926,0.5965229272842407,3.48934247178035
+The Hitchhiker's Guide to the Galaxy,Number One,0.4428691864013672,0.37735007984326807,0.5162665843963623,1.3980420177545387
+The Hitchhiker's Guide to the Galaxy,Number Two,0.4727605879306793,0.5012591080546629,0.5356969833374023,1.3391366496133605
+The Hitchhiker's Guide to the Galaxy,Googleplex Starthinker,0.49622243642807007,0.7685041051682417,0.5458606481552124,1.4505408685686187
+The Hitchhiker's Guide to the Galaxy,Great Green Arkleseizure,0.4108148217201233,0.37656494065703944,0.5255516171455383,3.2461254405061775
+The Hitchhiker's Guide to the Galaxy,Great Hyperlobic Omnicognate Neutron Wrangler,0.35632264614105225,0.2968392444170915,0.538632333278656,2.1133419090118393
+The Hitchhiker's Guide to the Galaxy,Grunthos the Flatulent,0.43799063563346863,0.5884191202262163,0.5056959390640259,2.533949332902004
+The Hitchhiker's Guide to the Galaxy,Guide Mark II,0.46577438712120056,2.6638920729684443,0.546289324760437,1.8161691216455371
+The Hitchhiker's Guide to the Galaxy,Hactar,0.3989352285861969,0.7576601357314339,0.5416232943534851,0.7711947173289098
+The Hitchhiker's Guide to the Galaxy,Haggunenons,0.4784274697303772,0.061146314832198866,0.5442502498626709,0.7159532413067194
+The Hitchhiker's Guide to the Galaxy,Heimdall,0.42214062809944153,0.5328247346765147,0.5135451555252075,1.5380454113971236
+The Hitchhiker's Guide to the Galaxy,Hig Hurtenflurst,0.4581269919872284,1.1332647343625395,0.5268608331680298,1.5980802997919823
+The Hitchhiker's Guide to the Galaxy,Hillman Hunter,0.5095416307449341,1.5175507068390162,0.5432708859443665,1.765636244338519
+The Hitchhiker's Guide to the Galaxy,Hotblack Desiato,0.4843844473361969,0.5442587992941074,0.5540450811386108,3.018975403984865
+The Hitchhiker's Guide to the Galaxy,Humma Kavula,0.4809923768043518,1.4779388286769861,0.5316513776779175,5.388965569722715
+The Hitchhiker's Guide to the Galaxy,Hurling Frootmig,0.3751438558101654,0.6959301679234284,0.5332341194152832,4.491338073974113
+The Hitchhiker's Guide to the Galaxy,Judiciary Pag,0.4077497720718384,0.38792008278098716,0.5317800045013428,3.631708981719817
+The Hitchhiker's Guide to the Galaxy,Know-Nothing-Bozo,0.3952953815460205,0.27833154003260013,0.5313194990158081,1.1784301044494518
+The Hitchhiker's Guide to the Galaxy,Prostetnic Vogon Kwaltz,0.4778609573841095,2.822516176868135,0.5188731551170349,6.296859603897407
+The Hitchhiker's Guide to the Galaxy,Lady Cynthia Fitzmelton,0.47013455629348755,0.47453467681040995,0.5366035103797913,0.945121595575924
+The Hitchhiker's Guide to the Galaxy,Lajestic Vantrashell of Lob,0.49462902545928955,0.788683257973526,0.5325215458869934,1.4285529850515457
+The Hitchhiker's Guide to the Galaxy,Lallafa,0.38552477955818176,0.3642502465315573,0.5176847577095032,0.9539209876634651
+The Hitchhiker's Guide to the Galaxy,Lig Lury Jr,0.5063257217407227,4.814829618872357,0.5402927994728088,1.1890879353641042
+The Hitchhiker's Guide to the Galaxy,Lintilla,0.48551246523857117,1.233854575936455,0.5066788196563721,0.1356336097707527
+The Hitchhiker's Guide to the Galaxy,Loonquawl,0.4905512034893036,1.0880415049049648,0.5231997966766357,0.912282620068297
+The Hitchhiker's Guide to the Galaxy,Phouchg,0.45619919896125793,0.5018064406777262,0.5188992023468018,1.040299760549525
+The Hitchhiker's Guide to the Galaxy,The Lord,0.3071805536746979,0.539492840010531,0.5095160007476807,3.5966796013484585
+The Hitchhiker's Guide to the Galaxy,Lunkwill and Fook,0.47032201290130615,0.7222041232070177,0.50676429271698,1.3777189736983275
+The Hitchhiker's Guide to the Galaxy,Majikthise,0.5712370276451111,10.386016033098292,0.5231562852859497,2.245254897374209
+The Hitchhiker's Guide to the Galaxy,Vroomfondel,0.49203070998191833,6.379823034810391,0.5519132018089294,5.594354917271374
+The Hitchhiker's Guide to the Galaxy,Max Quordlepleen,0.4683612883090973,0.6472498624723172,0.49733641743659973,3.2147789399329074
+The Hitchhiker's Guide to the Galaxy,Murray Bost Henson,0.5009537935256958,0.8650247165365152,0.5307431817054749,2.1319046443278546
+The Hitchhiker's Guide to the Galaxy,Old Man on the Pole,0.4790807068347931,1.1817041078963724,0.5179263353347778,5.0785527838217
+The Hitchhiker's Guide to the Galaxy,Old Thrashbarg,0.47178563475608826,0.734133117188203,0.5298166275024414,2.8792323369555364
+The Hitchhiker's Guide to the Galaxy,Old Woman in the Cave,0.48352083563804626,0.8531443032854303,0.5800648927688599,10.905864690592555
+The Hitchhiker's Guide to the Galaxy,Oolon Colluphid,0.5109764337539673,3.494924471198232,0.523297131061554,2.332563684862649
+The Hitchhiker's Guide to the Galaxy,Paul Neil Milne Johnstone,0.3960185945034027,0.5478944495090049,0.5309251546859741,3.0383673200038266
+The Hitchhiker's Guide to the Galaxy,Poodoo,0.322559654712677,0.43452957437335993,0.48707062005996704,1.1780212850157155
+The Hitchhiker's Guide to the Galaxy,Prak,0.3869447112083435,1.1668621031031616,0.5115901231765747,0.5061764822984539
+The Hitchhiker's Guide to the Galaxy,Mr Prosser,0.5088934302330017,4.318724304727283,0.5321364998817444,6.693953706336869
+The Hitchhiker's Guide to the Galaxy,Prostetnic Vogon Jeltz,0.49882662296295166,0.8692090518030713,0.5272074341773987,5.493615467566085
+The Hitchhiker's Guide to the Galaxy,Questular Rontok,0.4900446832180023,1.1360314227027488,0.4767529368400574,4.434786665064095
+The Hitchhiker's Guide to the Galaxy,Random Dent,0.4872552454471588,0.730590806312388,0.5017336010932922,0.5736767084024997
+The Hitchhiker's Guide to the Galaxy,Reg Nullify,0.47189655900001526,0.732566560752424,0.5031888484954834,0.9387981627860931
+The Hitchhiker's Guide to the Galaxy,Rob McKenna,0.48938116431236267,0.5788952977260052,0.4645346999168396,0.18472439351238784
+The Hitchhiker's Guide to the Galaxy,Roosta,0.4013340473175049,0.5160362064200719,0.5600486993789673,7.561060928517275
+The Hitchhiker's Guide to the Galaxy,Ruler of the Universe,0.5010707974433899,1.112097138724288,0.5338650941848755,3.4558857645636705
+The Hitchhiker's Guide to the Galaxy,Russell,0.3510933518409729,0.14252382210750555,0.4901808202266693,1.2995699622920482
diff --git a/ressources/images/Transformer.png b/ressources/images/Transformer.png
new file mode 100644
index 0000000000000000000000000000000000000000..371d78ebcfaec3530abe9180be5bed72b711df12
Binary files /dev/null and b/ressources/images/Transformer.png differ