diff --git a/requirements.txt b/requirements.txt
index ccfd1df..83d6966 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -17,7 +17,11 @@ chromadb==0.4.24
 qdrant-client
 git+https://github.com/sdan/surya.git
 beautifulsoup4==4.12.3
-llama-cpp-python==0.2.58
+llama-cpp-python==0.2.59
 huggingface_hub
 fastapi==0.110.1
-python-multipart
\ No newline at end of file
+python-multipart
+lancedb
+langchain==0.1.14
+langchain-community
+faiss-cpu==1.8.0
\ No newline at end of file
diff --git a/tests/benchtest.py b/tests/benchtest.py
index 251bf82..a5dff08 100644
--- a/tests/benchtest.py
+++ b/tests/benchtest.py
@@ -16,6 +16,14 @@
 from qdrant_client.models import Distance, VectorParams, PointStruct
 from sentence_transformers import SentenceTransformer
 
+from langchain_community.embeddings import HuggingFaceEmbeddings
+
+from langchain.document_loaders import TextLoader
+from langchain.vectorstores import LanceDB, Lantern, FAISS
+from langchain.text_splitter import CharacterTextSplitter
+
+from langchain.docstore.document import Document
+
 
 def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
     """Run the VLite benchmark.
@@ -84,7 +92,7 @@ def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
 
     results.append(
         {
-            "num_embeddings": len(results_retrieve),
+            "num_embeddings": len(result_add),
             "lib": "VLite",
             "k": top_k,
             "avg_time": np.mean(times),
@@ -95,6 +103,101 @@ def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
 
     print(json.dumps(results[-1], indent=2))
     print("Done VLite benchmark.")
+
+    #################################################
+    #                   LanceDB                     #
+    #################################################
+    print("Begin LanceDB benchmark.")
+    print("Adding documents to LanceDB instance...")
+    t0 = time.time()
+
+    embeddings = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
+    documents = [Document(page_content=text) for text in corpus]
+    docsearch = LanceDB.from_documents(documents, embeddings)
+
+    t1 = time.time()
+    print(f"Took {t1 - t0:.3f}s to add documents.")
+    indexing_times.append(
+        {
+            "num_tokens": token_count,
+            "lib": "LanceDB",
+            "num_embeddings": len(documents),
+            "indexing_time": t1 - t0,
+        }
+    )
+
+    print("Starting LanceDB trials...")
+    times = []
+    for query in queries:
+        t0 = time.time()
+        docs = docsearch.similarity_search(query, k=top_k)
+        t1 = time.time()
+        times.append(t1 - t0)
+
+        print(f"Top {top_k} results for query '{query}':")
+        for doc in docs:
+            print(f"Text: {doc.page_content}\n---")
+
+    results.append(
+        {
+            "num_embeddings": len(documents),
+            "lib": "LanceDB",
+            "k": top_k,
+            "avg_time": np.mean(times),
+            "stddev_time": np.std(times),
+        }
+    )
+
+    print(json.dumps(results[-1], indent=2))
+    print("Done LanceDB benchmark.")
+
+    #################################################
+    #                    FAISS                      #
+    #################################################
+    print("Begin FAISS benchmark.")
+    print("Adding documents to FAISS instance...")
+    t0 = time.time()
+
+    embeddings = HuggingFaceEmbeddings(model_name="mixedbread-ai/mxbai-embed-large-v1")
+    documents = [Document(page_content=text) for text in corpus]
+    db = FAISS.from_documents(documents, embeddings)
+
+    t1 = time.time()
+    print(f"Took {t1 - t0:.3f}s to add documents.")
+    indexing_times.append(
+        {
+            "num_tokens": token_count,
+            "lib": "FAISS",
+            "num_embeddings": len(corpus),
+            "indexing_time": t1 - t0,
+        }
+    )
+
+    print("Starting FAISS trials...")
+    times = []
+    for query in queries:
+        t0 = time.time()
+        docs = db.similarity_search(query, k=top_k)
+        t1 = time.time()
+        times.append(t1 - t0)
+
+        print(f"Top {top_k} results for query '{query}':")
+        for doc in docs:
+            print(f"Text: {doc.page_content}\n---")
+
+    results.append(
+        {
+            "num_embeddings": len(corpus),
+            "lib": "FAISS",
+            "k": top_k,
+            "avg_time": np.mean(times),
+            "stddev_time": np.std(times),
+        }
+    )
+
+    print(json.dumps(results[-1], indent=2))
+    print("Done FAISS benchmark.")
+
+    #################################################
+    #                   Chroma                      #
+    #################################################
@@ -104,7 +207,7 @@ def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
 
     chroma_client = chromadb.Client()
     sentence_transformer_ef = embedding_functions.SentenceTransformerEmbeddingFunction(model_name="mixedbread-ai/mxbai-embed-large-v1")
-    collection = chroma_client.create_collection(name="my_collection", embedding_function=sentence_transformer_ef)
+    collection = chroma_client.get_or_create_collection(name="benchtest", embedding_function=sentence_transformer_ef)
     ids = [str(i) for i in range(len(corpus))]
     try:
         collection.add(documents=corpus, ids=ids)
@@ -176,10 +279,10 @@ def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
             points=[
                 PointStruct(
                     id=idx,
-                    vector=model.encode(vector).tolist(),
-                    payload={"text": corpus[idx]}
+                    vector=model.encode(text).tolist(),
+                    payload={"text": text}
                 )
-                for idx, vector in enumerate(corpus)
+                for idx, text in enumerate(corpus)
             ]
         )
     except Exception as e:
@@ -203,7 +306,7 @@ def main(queries, corpuss, top_k, token_counts) -> pd.DataFrame:
 
     for i in range(len(query)):
         t0 = time.time()
-        query_vector = model.encode(query[i]).tolist()
+        query_vector = model.encode(query).tolist()
         try:
             hits = qdrant_client.search(
                 collection_name="my_collection",
diff --git a/tests/notebook2.ipynb b/tests/notebook2.ipynb
new file mode 100644
index 0000000..b894824
--- /dev/null
+++ b/tests/notebook2.ipynb
@@ -0,0 +1,4338 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "/Users/sdan/miniforge3/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n",
+      "  from .autonotebook import tqdm as notebook_tqdm\n",
+      "llama_model_loader: loaded meta data with 23 key-value pairs and 389 tensors from /Users/sdan/.cache/huggingface/hub/models--mixedbread-ai--mxbai-embed-large-v1/snapshots/456b7cfe38ad0f470a4e2074798eae56e5904bf6/gguf/mxbai-embed-large-v1-f16.gguf (version GGUF V3 (latest))\n",
+      "llama_model_loader: Dumping metadata keys/values. 
Note: KV overrides do not apply in this output.\n", + "llama_model_loader: - kv 0: general.architecture str = bert\n", + "llama_model_loader: - kv 1: general.name str = mxbai-embed-large-v1\n", + "llama_model_loader: - kv 2: bert.block_count u32 = 24\n", + "llama_model_loader: - kv 3: bert.context_length u32 = 512\n", + "llama_model_loader: - kv 4: bert.embedding_length u32 = 1024\n", + "llama_model_loader: - kv 5: bert.feed_forward_length u32 = 4096\n", + "llama_model_loader: - kv 6: bert.attention.head_count u32 = 16\n", + "llama_model_loader: - kv 7: bert.attention.layer_norm_epsilon f32 = 0.000000\n", + "llama_model_loader: - kv 8: general.file_type u32 = 1\n", + "llama_model_loader: - kv 9: bert.attention.causal bool = false\n", + "llama_model_loader: - kv 10: bert.pooling_type u32 = 2\n", + "llama_model_loader: - kv 11: tokenizer.ggml.token_type_count u32 = 2\n", + "llama_model_loader: - kv 12: tokenizer.ggml.bos_token_id u32 = 101\n", + "llama_model_loader: - kv 13: tokenizer.ggml.eos_token_id u32 = 102\n", + "llama_model_loader: - kv 14: tokenizer.ggml.model str = bert\n", + "llama_model_loader: - kv 15: tokenizer.ggml.tokens arr[str,30522] = [\"[PAD]\", \"[unused0]\", \"[unused1]\", \"...\n", + "llama_model_loader: - kv 16: tokenizer.ggml.scores arr[f32,30522] = [-1000.000000, -1000.000000, -1000.00...\n", + "llama_model_loader: - kv 17: tokenizer.ggml.token_type arr[i32,30522] = [3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, ...\n", + "llama_model_loader: - kv 18: tokenizer.ggml.unknown_token_id u32 = 100\n", + "llama_model_loader: - kv 19: tokenizer.ggml.seperator_token_id u32 = 102\n", + "llama_model_loader: - kv 20: tokenizer.ggml.padding_token_id u32 = 0\n", + "llama_model_loader: - kv 21: tokenizer.ggml.cls_token_id u32 = 101\n", + "llama_model_loader: - kv 22: tokenizer.ggml.mask_token_id u32 = 103\n", + "llama_model_loader: - type f32: 243 tensors\n", + "llama_model_loader: - type f16: 146 tensors\n", + "llm_load_vocab: mismatch in special tokens definition ( 7104/30522 vs 5/30522 ).\n", + "llm_load_print_meta: format = GGUF V3 (latest)\n", + "llm_load_print_meta: arch = bert\n", + "llm_load_print_meta: vocab type = WPM\n", + "llm_load_print_meta: n_vocab = 30522\n", + "llm_load_print_meta: n_merges = 0\n", + "llm_load_print_meta: n_ctx_train = 512\n", + "llm_load_print_meta: n_embd = 1024\n", + "llm_load_print_meta: n_head = 16\n", + "llm_load_print_meta: n_head_kv = 16\n", + "llm_load_print_meta: n_layer = 24\n", + "llm_load_print_meta: n_rot = 64\n", + "llm_load_print_meta: n_embd_head_k = 64\n", + "llm_load_print_meta: n_embd_head_v = 64\n", + "llm_load_print_meta: n_gqa = 1\n", + "llm_load_print_meta: n_embd_k_gqa = 1024\n", + "llm_load_print_meta: n_embd_v_gqa = 1024\n", + "llm_load_print_meta: f_norm_eps = 1.0e-12\n", + "llm_load_print_meta: f_norm_rms_eps = 0.0e+00\n", + "llm_load_print_meta: f_clamp_kqv = 0.0e+00\n", + "llm_load_print_meta: f_max_alibi_bias = 0.0e+00\n", + "llm_load_print_meta: f_logit_scale = 0.0e+00\n", + "llm_load_print_meta: n_ff = 4096\n", + "llm_load_print_meta: n_expert = 0\n", + "llm_load_print_meta: n_expert_used = 0\n", + "llm_load_print_meta: causal attn = 0\n", + "llm_load_print_meta: pooling type = 2\n", + "llm_load_print_meta: rope type = 2\n", + "llm_load_print_meta: rope scaling = linear\n", + "llm_load_print_meta: freq_base_train = 10000.0\n", + "llm_load_print_meta: freq_scale_train = 1\n", + "llm_load_print_meta: n_yarn_orig_ctx = 512\n", + "llm_load_print_meta: rope_finetuned = unknown\n", + "llm_load_print_meta: ssm_d_conv = 0\n", + 
"llm_load_print_meta: ssm_d_inner = 0\n", + "llm_load_print_meta: ssm_d_state = 0\n", + "llm_load_print_meta: ssm_dt_rank = 0\n", + "llm_load_print_meta: model type = ?B\n", + "llm_load_print_meta: model ftype = F16\n", + "llm_load_print_meta: model params = 334.09 M\n", + "llm_load_print_meta: model size = 637.85 MiB (16.02 BPW) \n", + "llm_load_print_meta: general.name = mxbai-embed-large-v1\n", + "llm_load_print_meta: BOS token = 101 '[CLS]'\n", + "llm_load_print_meta: EOS token = 102 '[SEP]'\n", + "llm_load_print_meta: UNK token = 100 '[UNK]'\n", + "llm_load_print_meta: SEP token = 102 '[SEP]'\n", + "llm_load_print_meta: PAD token = 0 '[PAD]'\n", + "llm_load_tensors: ggml ctx size = 0.15 MiB\n", + "llm_load_tensors: offloading 0 repeating layers to GPU\n", + "llm_load_tensors: offloaded 0/25 layers to GPU\n", + "llm_load_tensors: CPU buffer size = 637.85 MiB\n", + "................................................................................\n", + "llama_new_context_with_model: n_ctx = 512\n", + "llama_new_context_with_model: n_batch = 512\n", + "llama_new_context_with_model: n_ubatch = 512\n", + "llama_new_context_with_model: freq_base = 10000.0\n", + "llama_new_context_with_model: freq_scale = 1\n", + "llama_kv_cache_init: CPU KV buffer size = 48.00 MiB\n", + "llama_new_context_with_model: KV self size = 48.00 MiB, K (f16): 24.00 MiB, V (f16): 24.00 MiB\n", + "llama_new_context_with_model: CPU output buffer size = 0.00 MiB\n", + "llama_new_context_with_model: CPU compute buffer size = 25.01 MiB\n", + "llama_new_context_with_model: graph nodes = 849\n", + "llama_new_context_with_model: graph splits = 1\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Downloaded model to /Users/sdan/.cache/huggingface/hub/models--mixedbread-ai--mxbai-embed-large-v1/snapshots/456b7cfe38ad0f470a4e2074798eae56e5904bf6/gguf/mxbai-embed-large-v1-f16.gguf\n", + "Collection file vlite_20240403_161712.npz.npz not found. Initializing empty attributes.\n", + "Not using OCR for data/attention2.pdf\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "AVX = 0 | AVX_VNNI = 0 | AVX2 = 0 | AVX512 = 0 | AVX512_VBMI = 0 | AVX512_VNNI = 0 | FMA = 0 | NEON = 1 | ARM_FMA = 1 | F16C = 0 | FP16_VA = 1 | WASM_SIMD = 0 | BLAS = 1 | SSE3 = 0 | SSSE3 = 0 | VSX = 0 | MATMUL_INT8 = 0 | \n", + "Model metadata: {'tokenizer.ggml.cls_token_id': '101', 'tokenizer.ggml.padding_token_id': '0', 'tokenizer.ggml.seperator_token_id': '102', 'tokenizer.ggml.unknown_token_id': '100', 'tokenizer.ggml.token_type_count': '2', 'general.file_type': '1', 'tokenizer.ggml.eos_token_id': '102', 'bert.context_length': '512', 'bert.pooling_type': '2', 'tokenizer.ggml.bos_token_id': '101', 'bert.attention.head_count': '16', 'bert.feed_forward_length': '4096', 'tokenizer.ggml.mask_token_id': '103', 'tokenizer.ggml.model': 'bert', 'bert.attention.causal': 'false', 'general.name': 'mxbai-embed-large-v1', 'bert.block_count': '24', 'bert.attention.layer_norm_epsilon': '0.000000', 'bert.embedding_length': '1024', 'general.architecture': 'bert'}\n", + "Using fallback chat format: None\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Lenght of text: 1\n", + "Original text: ['Attention Is All You Need\\nAshish Vaswani⇤\\nGoogle Brain\\navaswani@google.comNoam Shazeer⇤\\nGoogle Brain\\nnoam@google.comNiki Parmar⇤\\nGoogle Research\\nnikip@google.comJakob Uszkoreit⇤\\nGoogle Research\\nusz@google.com\\nLlion Jones⇤\\nGoogle Research\\nllion@google.comAidan N. 
Gomez⇤†\\nUniversity of Toronto\\naidan@cs.toronto.eduŁukasz Kaiser⇤\\nGoogle Brain\\nlukaszkaiser@google.com\\nIllia Polosukhin⇤‡\\nillia.polosukhin@gmail.com\\nAbstract\\nThe dominant sequence transduction models are based on complex recurrent or\\nconvolutional neural networks that include an encoder and a decoder. The best\\nperforming models also connect the encoder and decoder through an attention\\nmechanism. We propose a new simple network architecture, the Transformer,\\nbased solely on attention mechanisms, dispensing with recurrence and convolutions\\nentirely. Experiments on two machine translation tasks show these models to\\nbe superior in quality while being more parallelizable and requiring significantly\\nless time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-\\nto-German translation task, improving over the existing best results, including\\nensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task,\\nour model establishes a new single-model state-of-the-art BLEU score of 41.0 after\\ntraining for 3.5 days on eight GPUs, a small fraction of the training costs of the\\nbest models from the literature.\\n1 Introduction\\nRecurrent neural networks, long short-term memory [ 12] and gated recurrent [ 7] neural networks\\nin particular, have been firmly established as state of the art approaches in sequence modeling and\\ntransduction problems such as language modeling and machine translation [ 29,2,5]. Numerous\\nefforts have since continued to push the boundaries of recurrent language models and encoder-decoder\\narchitectures [31, 21, 13].\\n⇤Equal contribution. Listing order is random. Jakob proposed replacing RNNs with self-attention and started\\nthe effort to evaluate this idea. Ashish, with Illia, designed and implemented the first Transformer models and\\nhas been crucially involved in every aspect of this work. Noam proposed scaled dot-product attention, multi-head\\nattention and the parameter-free position representation and became the other person involved in nearly every\\ndetail. Niki designed, implemented, tuned and evaluated countless model variants in our original codebase and\\ntensor2tensor. Llion also experimented with novel model variants, was responsible for our initial codebase, and\\nefficient inference and visualizations. Lukasz and Aidan spent countless long days designing various parts of and\\nimplementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating\\nour research.\\n†Work performed while at Google Brain.\\n‡Work performed while at Google Research.\\n31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA.Recurrent models typically factor computation along the symbol positions of the input and output\\nsequences. Aligning the positions to steps in computation time, they generate a sequence of hidden\\nstates ht, as a function of the previous hidden state ht\\x001and the input for position t. This inherently\\nsequential nature precludes parallelization within training examples, which becomes critical at longer\\nsequence lengths, as memory constraints limit batching across examples. Recent work has achieved\\nsignificant improvements in computational efficiency through factorization tricks [ 18] and conditional\\ncomputation [ 26], while also improving model performance in case of the latter. 
The fundamental\\nconstraint of sequential computation, however, remains.\\nAttention mechanisms have become an integral part of compelling sequence modeling and transduc-\\ntion models in various tasks, allowing modeling of dependencies without regard to their distance in\\nthe input or output sequences [ 2,16]. In all but a few cases [ 22], however, such attention mechanisms\\nare used in conjunction with a recurrent network.\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead\\nrelying entirely on an attention mechanism to draw global dependencies between input and output.\\nThe Transformer allows for significantly more parallelization and can reach a new state of the art in\\ntranslation quality after being trained for as little as twelve hours on eight P100 GPUs.\\n2 Background\\nThe goal of reducing sequential computation also forms the foundation of the Extended Neural GPU\\n[20], ByteNet [ 15] and ConvS2S [ 8], all of which use convolutional neural networks as basic building\\nblock, computing hidden representations in parallel for all input and output positions. In these models,\\nthe number of operations required to relate signals from two arbitrary input or output positions grows\\nin the distance between positions, linearly for ConvS2S and logarithmically for ByteNet. This makes\\nit more difficult to learn dependencies between distant positions [ 11]. In the Transformer this is\\nreduced to a constant number of operations, albeit at the cost of reduced effective resolution due\\nto averaging attention-weighted positions, an effect we counteract with Multi-Head Attention as\\ndescribed in section 3.2.\\nSelf-attention, sometimes called intra-attention is an attention mechanism relating different positions\\nof a single sequence in order to compute a representation of the sequence. Self-attention has been\\nused successfully in a variety of tasks including reading comprehension, abstractive summarization,\\ntextual entailment and learning task-independent sentence representations [4, 22, 23, 19].\\nEnd-to-end memory networks are based on a recurrent attention mechanism instead of sequence-\\naligned recurrence and have been shown to perform well on simple-language question answering and\\nlanguage modeling tasks [28].\\nTo the best of our knowledge, however, the Transformer is the first transduction model relying\\nentirely on self-attention to compute representations of its input and output without using sequence-\\naligned RNNs or convolution. In the following sections, we will describe the Transformer, motivate\\nself-attention and discuss its advantages over models such as [14, 15] and [8].\\n3 Model Architecture\\nMost competitive neural sequence transduction models have an encoder-decoder structure [ 5,2,29].\\nHere, the encoder maps an input sequence of symbol representations (x1,. . . ,x n)to a sequence\\nof continuous representations z=( z1,. . . ,z n). Given z, the decoder then generates an output\\nsequence (y1,. . . ,y m)of symbols one element at a time. At each step the model is auto-regressive\\n[9], consuming the previously generated symbols as additional input when generating the next.\\nThe Transformer follows this overall architecture using stacked self-attention and point-wise, fully\\nconnected layers for both the encoder and decoder, shown in the left and right halves of Figure 1,\\nrespectively.\\n3.1 Encoder and Decoder Stacks\\nEncoder: The encoder is composed of a stack of N =6 identical layers. 
Each layer has two\\nsub-layers. The first is a multi-head self-attention mechanism, and the second is a simple, position-\\n2']\n", + "Chopped text into this chunk: ['Attention Is All You Need\\nAshish Vaswani⇤\\nGoogle Brain\\navaswani@google.comNoam Shazeer⇤\\nGoogle Brain\\nnoam@google.comNiki Parmar⇤\\nGoogle Research\\nnikip@google.comJakob Uszkoreit⇤\\nGoogle Research\\nusz@google.com\\nLlion Jones⇤\\nGoogle Research\\nllion@google.comAidan N. Gomez⇤†\\nUniversity of Toronto\\naidan@cs.toronto.eduŁukasz Kaiser⇤\\nGoogle Brain\\nlukaszkaiser@google.com\\nIllia Polosukhin⇤‡\\nillia.polosukhin@gmail.com\\nAbstract\\nThe dominant sequence transduction models are based on complex recurrent or\\nconvolutional neural networks that include an encoder and a decoder. The best\\nperforming models also connect the encoder and decoder through an attention\\nmechanism. We propose a new simple network architecture, the Transformer,\\nbased solely on attention mechanisms, dispensing with recurrence and convolutions\\nentirely. Experiments on two machine translation tasks show these models to\\nbe superior in quality while being more parallelizable and requiring significantly\\nless time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-\\nto-German translation task, improving over the existing best results, including\\nensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task,\\nour model establishes a new single-model state-of-the-art BLEU score of 41.0 after\\ntraining for 3.5 days on eight GPUs, a small fraction of the training costs of the\\nbest models from the literature.\\n1 Introduction\\nRecurrent neural networks, long short-term memory [ 12] and gated recurrent [ 7] neural networks\\nin particular, have been firmly established as state of the art approaches in sequence modeling and\\ntransduction problems such as language modeling and machine translation [ 29,2,5]. Numerous\\nefforts have since continued to push the boundaries of recurrent language models and encoder-decoder\\narchitectures [31, 21, 13].\\n⇤Equal contribution. Listing order is random. Jakob proposed replacing RNNs with self-attention and started\\nthe effort to evaluate this idea. Ashish, with Illia, designed and implemented', ' the first Transformer models and\\nhas been crucially involved in every aspect of this work. Noam proposed scaled dot-product attention, multi-head\\nattention and the parameter-free position representation and became the other person involved in nearly every\\ndetail. Niki designed, implemented, tuned and evaluated countless model variants in our original codebase and\\ntensor2tensor. Llion also experimented with novel model variants, was responsible for our initial codebase, and\\nefficient inference and visualizations. Lukasz and Aidan spent countless long days designing various parts of and\\nimplementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating\\nour research.\\n†Work performed while at Google Brain.\\n‡Work performed while at Google Research.\\n31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA.Recurrent models typically factor computation along the symbol positions of the input and output\\nsequences. Aligning the positions to steps in computation time, they generate a sequence of hidden\\nstates ht, as a function of the previous hidden state ht\\x001and the input for position t. 
This inherently\\nsequential nature precludes parallelization within training examples, which becomes critical at longer\\nsequence lengths, as memory constraints limit batching across examples. Recent work has achieved\\nsignificant improvements in computational efficiency through factorization tricks [ 18] and conditional\\ncomputation [ 26], while also improving model performance in case of the latter. The fundamental\\nconstraint of sequential computation, however, remains.\\nAttention mechanisms have become an integral part of compelling sequence modeling and transduc-\\ntion models in various tasks, allowing modeling of dependencies without regard to their distance in\\nthe input or output sequences [ 2,16]. In all but a few cases [ 22], however, such attention mechanisms\\nare used in conjunction with a recurrent network.\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead\\nrelying entirely on an attention mechanism to draw global dependencies between input and output.\\nThe Transformer allows for significantly more parallelization and can reach a new state of the art in\\ntranslation quality after being trained for as little as twelve hours on eight P100 GPUs.\\n2 Background\\nThe goal of reducing sequential computation also forms the foundation of the Extended Neural GPU\\n[20], ByteNet [ 15] and ConvS2S [ 8], all of which use convolutional neural networks as basic building\\n', 'block, computing hidden representations in parallel for all input and output positions. In these models,\\nthe number of operations required to relate signals from two arbitrary input or output positions grows\\nin the distance between positions, linearly for ConvS2S and logarithmically for ByteNet. This makes\\nit more difficult to learn dependencies between distant positions [ 11]. In the Transformer this is\\nreduced to a constant number of operations, albeit at the cost of reduced effective resolution due\\nto averaging attention-weighted positions, an effect we counteract with Multi-Head Attention as\\ndescribed in section 3.2.\\nSelf-attention, sometimes called intra-attention is an attention mechanism relating different positions\\nof a single sequence in order to compute a representation of the sequence. Self-attention has been\\nused successfully in a variety of tasks including reading comprehension, abstractive summarization,\\ntextual entailment and learning task-independent sentence representations [4, 22, 23, 19].\\nEnd-to-end memory networks are based on a recurrent attention mechanism instead of sequence-\\naligned recurrence and have been shown to perform well on simple-language question answering and\\nlanguage modeling tasks [28].\\nTo the best of our knowledge, however, the Transformer is the first transduction model relying\\nentirely on self-attention to compute representations of its input and output without using sequence-\\naligned RNNs or convolution. In the following sections, we will describe the Transformer, motivate\\nself-attention and discuss its advantages over models such as [14, 15] and [8].\\n3 Model Architecture\\nMost competitive neural sequence transduction models have an encoder-decoder structure [ 5,2,29].\\nHere, the encoder maps an input sequence of symbol representations (x1,. . . ,x n)to a sequence\\nof continuous representations z=( z1,. . . ,z n). Given z, the decoder then generates an output\\nsequence (y1,. . . ,y m)of symbols one element at a time. 
At each step the model is auto-regressive\\n[9], consuming the previously generated symbols as additional input when generating the next.\\nThe Transformer follows this overall architecture using stacked self-attention and point-wise, fully\\nconnected layers for both the encoder and decoder, shown in the left and right halves of Figure 1,\\nrespectively.\\n3.1 Encoder and Decoder Stacks\\nEncoder: The encoder is composed of a stack of N =6 identical layers. Each layer has two', '\\nsub-layers. The first is a multi-head self-attention mechanism, and the second is a simple, position-\\n2']\n", + "Chopped text into 4 chunks.\n", + "Adding text to the collection...\n", + "Lenght of text: 1\n", + "Original text: ['Attention Is All You Need\\nAshish Vaswani⇤\\nGoogle Brain\\navaswani@google.comNoam Shazeer⇤\\nGoogle Brain\\nnoam@google.comNiki Parmar⇤\\nGoogle Research\\nnikip@google.comJakob Uszkoreit⇤\\nGoogle Research\\nusz@google.com\\nLlion Jones⇤\\nGoogle Research\\nllion@google.comAidan N. Gomez⇤†\\nUniversity of Toronto\\naidan@cs.toronto.eduŁukasz Kaiser⇤\\nGoogle Brain\\nlukaszkaiser@google.com\\nIllia Polosukhin⇤‡\\nillia.polosukhin@gmail.com\\nAbstract\\nThe dominant sequence transduction models are based on complex recurrent or\\nconvolutional neural networks that include an encoder and a decoder. The best\\nperforming models also connect the encoder and decoder through an attention\\nmechanism. We propose a new simple network architecture, the Transformer,\\nbased solely on attention mechanisms, dispensing with recurrence and convolutions\\nentirely. Experiments on two machine translation tasks show these models to\\nbe superior in quality while being more parallelizable and requiring significantly\\nless time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-\\nto-German translation task, improving over the existing best results, including\\nensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task,\\nour model establishes a new single-model state-of-the-art BLEU score of 41.0 after\\ntraining for 3.5 days on eight GPUs, a small fraction of the training costs of the\\nbest models from the literature.\\n1 Introduction\\nRecurrent neural networks, long short-term memory [ 12] and gated recurrent [ 7] neural networks\\nin particular, have been firmly established as state of the art approaches in sequence modeling and\\ntransduction problems such as language modeling and machine translation [ 29,2,5]. Numerous\\nefforts have since continued to push the boundaries of recurrent language models and encoder-decoder\\narchitectures [31, 21, 13].\\n⇤Equal contribution. Listing order is random. Jakob proposed replacing RNNs with self-attention and started\\nthe effort to evaluate this idea. Ashish, with Illia, designed and implemented']\n", + "Chopped text into this chunk: ['Attention Is All You Need\\nAshish Vaswani⇤\\nGoogle Brain\\navaswani@google.comNoam Shazeer⇤\\nGoogle Brain\\nnoam@google.comNiki Parmar⇤\\nGoogle Research\\nnikip@google.comJakob Uszkoreit⇤\\nGoogle Research\\nusz@google.com\\nLlion Jones⇤\\nGoogle Research\\nllion@google.comAidan N. Gomez⇤†\\nUniversity of Toronto\\naidan@cs.toronto.eduŁukasz Kaiser⇤\\nGoogle Brain\\nlukaszkaiser@google.com\\nIllia Polosukhin⇤‡\\nillia.polosukhin@gmail.com\\nAbstract\\nThe dominant sequence transduction models are based on complex recurrent or\\nconvolutional neural networks that include an encoder and a decoder. 
The best\\nperforming models also connect the encoder and decoder through an attention\\nmechanism. We propose a new simple network architecture, the Transformer,\\nbased solely on attention mechanisms, dispensing with recurrence and convolutions\\nentirely. Experiments on two machine translation tasks show these models to\\nbe superior in quality while being more parallelizable and requiring significantly\\nless time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-\\nto-German translation task, improving over the existing best results, including\\nensembles, by over 2 BLEU. On the WMT 2014 English-to-French translation task,\\nour model establishes a new single-model state-of-the-art BLEU score of 41.0 after\\ntraining for 3.5 days on eight GPUs, a small fraction of the training costs of the\\nbest models from the literature.\\n1 Introduction\\nRecurrent neural networks, long short-term memory [ 12] and gated recurrent [ 7] neural networks\\nin particular, have been firmly established as state of the art approaches in sequence modeling and\\ntransduction problems such as language modeling and machine translation [ 29,2,5]. Numerous\\nefforts have since continued to push the boundaries of recurrent language models and encoder-decoder\\narchitectures [31, 21, 13].\\n⇤Equal contribution. Listing order is random. Jakob proposed replacing RNNs with self-attention and started\\nthe effort to evaluate this idea. Ashish, with Illia, designed and implemented']\n", + "Chopped text into 1 chunks.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "llama_print_timings: load time = 610.02 ms\n", + "llama_print_timings: sample time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: prompt eval time = 572.50 ms / 469 tokens ( 1.22 ms per token, 819.22 tokens per second)\n", + "llama_print_timings: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: total time = 609.23 ms / 470 tokens\n", + "/Users/sdan/Developer/vlite/vlite/model.py:45: RuntimeWarning: invalid value encountered in divide\n", + " return ((embeddings - np.min(embeddings, axis=0)) / (np.max(embeddings, axis=0) - np.min(embeddings, axis=0)) * 255).astype(np.uint8)\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Lenght of text: 1\n", + "Original text: [' the first Transformer models and\\nhas been crucially involved in every aspect of this work. Noam proposed scaled dot-product attention, multi-head\\nattention and the parameter-free position representation and became the other person involved in nearly every\\ndetail. Niki designed, implemented, tuned and evaluated countless model variants in our original codebase and\\ntensor2tensor. Llion also experimented with novel model variants, was responsible for our initial codebase, and\\nefficient inference and visualizations. Lukasz and Aidan spent countless long days designing various parts of and\\nimplementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating\\nour research.\\n†Work performed while at Google Brain.\\n‡Work performed while at Google Research.\\n31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA.Recurrent models typically factor computation along the symbol positions of the input and output\\nsequences. 
Aligning the positions to steps in computation time, they generate a sequence of hidden\\nstates ht, as a function of the previous hidden state ht\\x001and the input for position t. This inherently\\nsequential nature precludes parallelization within training examples, which becomes critical at longer\\nsequence lengths, as memory constraints limit batching across examples. Recent work has achieved\\nsignificant improvements in computational efficiency through factorization tricks [ 18] and conditional\\ncomputation [ 26], while also improving model performance in case of the latter. The fundamental\\nconstraint of sequential computation, however, remains.\\nAttention mechanisms have become an integral part of compelling sequence modeling and transduc-\\ntion models in various tasks, allowing modeling of dependencies without regard to their distance in\\nthe input or output sequences [ 2,16]. In all but a few cases [ 22], however, such attention mechanisms\\nare used in conjunction with a recurrent network.\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead\\nrelying entirely on an attention mechanism to draw global dependencies between input and output.\\nThe Transformer allows for significantly more parallelization and can reach a new state of the art in\\ntranslation quality after being trained for as little as twelve hours on eight P100 GPUs.\\n2 Background\\nThe goal of reducing sequential computation also forms the foundation of the Extended Neural GPU\\n[20], ByteNet [ 15] and ConvS2S [ 8], all of which use convolutional neural networks as basic building\\n']\n", + "Chopped text into this chunk: [' the first Transformer models and\\nhas been crucially involved in every aspect of this work. Noam proposed scaled dot-product attention, multi-head\\nattention and the parameter-free position representation and became the other person involved in nearly every\\ndetail. Niki designed, implemented, tuned and evaluated countless model variants in our original codebase and\\ntensor2tensor. Llion also experimented with novel model variants, was responsible for our initial codebase, and\\nefficient inference and visualizations. Lukasz and Aidan spent countless long days designing various parts of and\\nimplementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating\\nour research.\\n†Work performed while at Google Brain.\\n‡Work performed while at Google Research.\\n31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA.Recurrent models typically factor computation along the symbol positions of the input and output\\nsequences. Aligning the positions to steps in computation time, they generate a sequence of hidden\\nstates ht, as a function of the previous hidden state ht\\x001and the input for position t. This inherently\\nsequential nature precludes parallelization within training examples, which becomes critical at longer\\nsequence lengths, as memory constraints limit batching across examples. Recent work has achieved\\nsignificant improvements in computational efficiency through factorization tricks [ 18] and conditional\\ncomputation [ 26], while also improving model performance in case of the latter. 
The fundamental\\nconstraint of sequential computation, however, remains.\\nAttention mechanisms have become an integral part of compelling sequence modeling and transduc-\\ntion models in various tasks, allowing modeling of dependencies without regard to their distance in\\nthe input or output sequences [ 2,16]. In all but a few cases [ 22], however, such attention mechanisms\\nare used in conjunction with a recurrent network.\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead\\nrelying entirely on an attention mechanism to draw global dependencies between input and output.\\nThe Transformer allows for significantly more parallelization and can reach a new state of the art in\\ntranslation quality after being trained for as little as twelve hours on eight P100 GPUs.\\n2 Background\\nThe goal of reducing sequential computation also forms the foundation of the Extended Neural GPU\\n[20], ByteNet [ 15] and ConvS2S [ 8], all of which use convolutional neural networks as basic building\\n']\n", + "Chopped text into 1 chunks.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "llama_print_timings: load time = 610.02 ms\n", + "llama_print_timings: sample time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: prompt eval time = 628.01 ms / 508 tokens ( 1.24 ms per token, 808.91 tokens per second)\n", + "llama_print_timings: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: total time = 628.27 ms / 509 tokens\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Lenght of text: 1\n", + "Original text: ['block, computing hidden representations in parallel for all input and output positions. In these models,\\nthe number of operations required to relate signals from two arbitrary input or output positions grows\\nin the distance between positions, linearly for ConvS2S and logarithmically for ByteNet. This makes\\nit more difficult to learn dependencies between distant positions [ 11]. In the Transformer this is\\nreduced to a constant number of operations, albeit at the cost of reduced effective resolution due\\nto averaging attention-weighted positions, an effect we counteract with Multi-Head Attention as\\ndescribed in section 3.2.\\nSelf-attention, sometimes called intra-attention is an attention mechanism relating different positions\\nof a single sequence in order to compute a representation of the sequence. Self-attention has been\\nused successfully in a variety of tasks including reading comprehension, abstractive summarization,\\ntextual entailment and learning task-independent sentence representations [4, 22, 23, 19].\\nEnd-to-end memory networks are based on a recurrent attention mechanism instead of sequence-\\naligned recurrence and have been shown to perform well on simple-language question answering and\\nlanguage modeling tasks [28].\\nTo the best of our knowledge, however, the Transformer is the first transduction model relying\\nentirely on self-attention to compute representations of its input and output without using sequence-\\naligned RNNs or convolution. 
In the following sections, we will describe the Transformer, motivate\\nself-attention and discuss its advantages over models such as [14, 15] and [8].\\n3 Model Architecture\\nMost competitive neural sequence transduction models have an encoder-decoder structure [ 5,2,29].\\nHere, the encoder maps an input sequence of symbol representations (x1,. . . ,x n)to a sequence\\nof continuous representations z=( z1,. . . ,z n). Given z, the decoder then generates an output\\nsequence (y1,. . . ,y m)of symbols one element at a time. At each step the model is auto-regressive\\n[9], consuming the previously generated symbols as additional input when generating the next.\\nThe Transformer follows this overall architecture using stacked self-attention and point-wise, fully\\nconnected layers for both the encoder and decoder, shown in the left and right halves of Figure 1,\\nrespectively.\\n3.1 Encoder and Decoder Stacks\\nEncoder: The encoder is composed of a stack of N =6 identical layers. Each layer has two']\n", + "Chopped text into this chunk: ['block, computing hidden representations in parallel for all input and output positions. In these models,\\nthe number of operations required to relate signals from two arbitrary input or output positions grows\\nin the distance between positions, linearly for ConvS2S and logarithmically for ByteNet. This makes\\nit more difficult to learn dependencies between distant positions [ 11]. In the Transformer this is\\nreduced to a constant number of operations, albeit at the cost of reduced effective resolution due\\nto averaging attention-weighted positions, an effect we counteract with Multi-Head Attention as\\ndescribed in section 3.2.\\nSelf-attention, sometimes called intra-attention is an attention mechanism relating different positions\\nof a single sequence in order to compute a representation of the sequence. Self-attention has been\\nused successfully in a variety of tasks including reading comprehension, abstractive summarization,\\ntextual entailment and learning task-independent sentence representations [4, 22, 23, 19].\\nEnd-to-end memory networks are based on a recurrent attention mechanism instead of sequence-\\naligned recurrence and have been shown to perform well on simple-language question answering and\\nlanguage modeling tasks [28].\\nTo the best of our knowledge, however, the Transformer is the first transduction model relying\\nentirely on self-attention to compute representations of its input and output without using sequence-\\naligned RNNs or convolution. In the following sections, we will describe the Transformer, motivate\\nself-attention and discuss its advantages over models such as [14, 15] and [8].\\n3 Model Architecture\\nMost competitive neural sequence transduction models have an encoder-decoder structure [ 5,2,29].\\nHere, the encoder maps an input sequence of symbol representations (x1,. . . ,x n)to a sequence\\nof continuous representations z=( z1,. . . ,z n). Given z, the decoder then generates an output\\nsequence (y1,. . . ,y m)of symbols one element at a time. At each step the model is auto-regressive\\n[9], consuming the previously generated symbols as additional input when generating the next.\\nThe Transformer follows this overall architecture using stacked self-attention and point-wise, fully\\nconnected layers for both the encoder and decoder, shown in the left and right halves of Figure 1,\\nrespectively.\\n3.1 Encoder and Decoder Stacks\\nEncoder: The encoder is composed of a stack of N =6 identical layers. 
Each layer has two']\n", + "Chopped text into 1 chunks.\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "\n", + "llama_print_timings: load time = 610.02 ms\n", + "llama_print_timings: sample time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: prompt eval time = 581.07 ms / 512 tokens ( 1.13 ms per token, 881.13 tokens per second)\n", + "llama_print_timings: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: total time = 581.41 ms / 513 tokens\n", + "\n", + "llama_print_timings: load time = 610.02 ms\n", + "llama_print_timings: sample time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: prompt eval time = 102.58 ms / 29 tokens ( 3.54 ms per token, 282.71 tokens per second)\n", + "llama_print_timings: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n", + "llama_print_timings: total time = 102.66 ms / 30 tokens\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Lenght of text: 1\n", + "Original text: ['\\nsub-layers. The first is a multi-head self-attention mechanism, and the second is a simple, position-\\n2']\n", + "Chopped text into this chunk: ['\\nsub-layers. The first is a multi-head self-attention mechanism, and the second is a simple, position-\\n2']\n", + "Chopped text into 1 chunks.\n", + "Saving collection to vlite_20240403_161712.npz.npz\n", + "Collection saved successfully.\n", + "Text added successfully.\n" + ] + }, + { + "data": { + "text/plain": [ + "[('b6c62336-83da-40c7-a80b-d9289eb5a65d',\n", + " [[0.022374463502880894,\n", + " 0.03836917131117475,\n", + " -0.05359219365750071,\n", + " -0.012213155719481675,\n", + " 0.003572489496419601,\n", + " -0.0032761355527294762,\n", + " 0.006455623966013962,\n", + " -0.024978054747860006,\n", + " -0.015776332392049123,\n", + " 0.03878345056583734,\n", + " 0.042356234555387805,\n", + " 0.016980522312466603,\n", + " 0.010231427181838134,\n", + " -0.047161371352357546,\n", + " -0.007769850869361796,\n", + " -0.024911000413904906,\n", + " -0.013477232027734244,\n", + " -0.03540494861683958,\n", + " -0.046793523569908124,\n", + " -0.02157853116171931,\n", + " -0.010462199674356548,\n", + " -0.0009660582280956271,\n", + " -0.038948239911879484,\n", + " 0.007148945056136985,\n", + " -0.025159200559408314,\n", + " 0.024882123401917224,\n", + " 0.01752759878121017,\n", + " 0.01912835331849783,\n", + " 0.0881376861797613,\n", + " 0.04671964166716273,\n", + " 0.030790990420063315,\n", + " -0.03700526266581818,\n", + " 0.031495044241428655,\n", + " -0.026222045456626375,\n", + " -0.014566127470290239,\n", + " -0.04328437011881968,\n", + " 0.012772120367020655,\n", + " -0.03322073060685461,\n", + " -0.02778360320847377,\n", + " -0.025492668563039562,\n", + " -0.002471283740438685,\n", + " -0.020914091254874773,\n", + " 0.02124487628425998,\n", + " -0.03665254832641079,\n", + " -0.07327181308922706,\n", + " 0.007912732596093933,\n", + " 0.03977598752227208,\n", + " -0.044065699605129696,\n", + " 0.004073667583914936,\n", + " -0.013708698623042678,\n", + " -0.021364951061384467,\n", + " 0.021364802563431793,\n", + " -1.711689181999334e-05,\n", + " 0.032194002413946106,\n", + " 0.02305537166582518,\n", + " -0.03633563033617908,\n", + " 0.01732841297118663,\n", + " 0.020431105835098145,\n", + " -0.018004906239942473,\n", + " -0.0058265632792645775,\n", + " 0.05176218831458045,\n", + " 0.027673788138214268,\n", + " 
... (remaining embedding vector values in this cell output elided) ...
+ " 0.02447814222999503,\n", + " 0.014754224320836682,\n", + " 0.05327449146459758,\n", + " -0.025392547572611827,\n", + " -0.01923344481904929,\n", + " 0.03295755886439243,\n", + " 0.0001486266369938856,\n", + " -0.02102841298772044,\n", + " -0.0008756563452444943,\n", + " 0.03950083916964941,\n", + " 0.00959564070579411,\n", + " 0.04274372077494909,\n", + " -0.0236715013510778,\n", + " 0.009088138107179615,\n", + " 0.02092983871192335,\n", + " 0.021613142864306264,\n", + " -0.015009931121273504,\n", + " -0.0470831346220113,\n", + " 0.007805232592231431,\n", + " -0.06002562885920438,\n", + " -0.01623348751064234,\n", + " -0.02545974373045431,\n", + " -0.033052277198969006,\n", + " 0.003587203307606599,\n", + " 0.043600427154629154,\n", + " -0.02794800545872917,\n", + " -0.004304456344467491,\n", + " 0.01178627332677963,\n", + " 0.032994035968227566,\n", + " -0.06036074035141296,\n", + " -0.021254992498721256,\n", + " -0.010767795112826056,\n", + " 0.0064997387033138,\n", + " -0.03881439486954818,\n", + " -0.010581848145560721,\n", + " 0.03211145090039075,\n", + " -0.02671203363940712,\n", + " -0.019667674523375978,\n", + " 0.031001663964964947,\n", + " 0.010916074489860391,\n", + " 0.022261498273805146,\n", + " 0.013025116662692123,\n", + " 0.006813146969461837,\n", + " 0.006399268992974733,\n", + " 0.037655443432173516,\n", + " -0.007167248680191117,\n", + " 0.017240455464747468,\n", + " -0.02967446935276126,\n", + " 0.060492786733148965,\n", + " 0.024711357430409094,\n", + " 0.017689534964341412,\n", + " 0.014826546160820853,\n", + " -0.0445346661507675,\n", + " -0.019134588563993752,\n", + " -0.0059706893020439675,\n", + " 0.013546200149910866,\n", + " -0.04023311761312046,\n", + " 0.003893666374563631,\n", + " 0.02601194421282324,\n", + " -0.011175351080968484,\n", + " 0.03505668587897971,\n", + " -0.02469361609635553,\n", + " 0.03622086978354068,\n", + " -0.05705753854403443,\n", + " -0.05629309777993735,\n", + " 0.012980452983522293,\n", + " -0.0010806239814534103,\n", + " -0.05108282186071784,\n", + " 0.02768368577703734,\n", + " 0.0616526992359252,\n", + " 0.07650211408145763,\n", + " 0.0009831085878272742,\n", + " 0.01736572767021549,\n", + " 0.009622458768640242,\n", + " 0.03852813086118726,\n", + " 0.005858289282872352,\n", + " 0.0022355622199441943,\n", + " 0.01691396019082654,\n", + " -0.013595357977794443,\n", + " 0.00725616975480633,\n", + " -0.06296971923559634,\n", + " -0.03028501618239992,\n", + " 0.024571136987210807,\n", + " -0.035152802430234255,\n", + " 0.002036455873010498,\n", + " -0.016379646203431204,\n", + " -0.01253184483978936,\n", + " -0.030447723220072002,\n", + " 0.0182680512861435,\n", + " 0.045439058726934996,\n", + " 0.011574265803078425,\n", + " 0.03682790605483751,\n", + " -0.030729388797449044,\n", + " -0.0018702808241627996,\n", + " -0.004441211696393725,\n", + " -0.019825502819322056,\n", + " -0.024988134254962196,\n", + " -0.027046918213395884,\n", + " -0.04219252974495507,\n", + " 0.007976524146381043,\n", + " -0.01964664287513511,\n", + " 0.05305380682176297,\n", + " 0.01849482601355401,\n", + " -0.03796110227426326,\n", + " 0.0012601365240868618,\n", + " 0.03002783107650166,\n", + " 0.049196634236197696,\n", + " -0.00013338036053557766,\n", + " 0.011076182813360704,\n", + " -0.005779724689068808,\n", + " 0.016549447772505124,\n", + " -0.0635020026386276,\n", + " -0.017698329713875534,\n", + " 0.0004964580633844753,\n", + " 0.011929608889951395,\n", + " -0.06865962018271964,\n", + " -0.027011080151312136,\n", + " 0.015757408079923145,\n", + " 
0.015128904677625362,\n", + " -0.006308112523616902,\n", + " -0.028648415240446905,\n", + " -0.08513119996698572,\n", + " 0.004186868073792109,\n", + " 0.027191123073571497,\n", + " 0.01413274704077153,\n", + " -0.043822696887969824,\n", + " 0.027613197636491227,\n", + " -0.02917916360846178,\n", + " 0.010800026676879447,\n", + " -0.029477267408644772,\n", + " -0.02045961577349495,\n", + " 0.020363173861557587,\n", + " 0.02593440825933231,\n", + " 0.026932863443161762,\n", + " 0.005767543685658854,\n", + " -0.021250989728064384,\n", + " 0.015061653458744247,\n", + " 0.050466411864723094,\n", + " -0.04438759978509938,\n", + " 0.04701855970331094,\n", + " -0.017619288758662012,\n", + " 0.01748081358353671,\n", + " 0.026836688493835945,\n", + " -0.014544943987064053,\n", + " -0.00285384908823683,\n", + " 0.019373753693612647,\n", + " 0.018053732033077943,\n", + " 0.0013232308431158483,\n", + " -0.0335638676625728,\n", + " 0.05575277212823326,\n", + " 0.005661190786767839,\n", + " -0.016468751649100014,\n", + " 0.020694529523524973,\n", + " 0.0783366077332845,\n", + " -0.007017715413140676,\n", + " -0.01318647804195038,\n", + " 0.018613716144089763,\n", + " -0.04012534146980664,\n", + " 0.03172896689276331,\n", + " -0.04684062244865015,\n", + " 0.0012106756519262721,\n", + " -0.002246532923326959,\n", + " 0.018620950830862664,\n", + " -0.016665968609863403,\n", + " 0.014104100284036306,\n", + " -0.04659270428240518,\n", + " 0.026127891080566696,\n", + " 0.011544961650912271,\n", + " -0.02491286080960412,\n", + " 0.00032323666154374065,\n", + " 0.017899684595117888,\n", + " 0.00899160442684453,\n", + " 0.009243483650838312,\n", + " 0.025179878482188885,\n", + " -0.006440656540345996,\n", + " 0.01786755397630199,\n", + " 0.03344447530862408,\n", + " 0.046117821177888985,\n", + " 0.0014280142938413278,\n", + " 0.01816836410995953,\n", + " 0.016760398291116238,\n", + " 0.012668788316930916,\n", + " 0.029652324804133507,\n", + " 0.01750665222730173,\n", + " -0.04020329121534547,\n", + " 0.017954830728081708,\n", + " -0.012878016926697558,\n", + " -0.07278319142131416,\n", + " 0.04341940981883765,\n", + " 0.009475537563892147,\n", + " -0.04813034552129069,\n", + " 0.040767937160961415,\n", + " -0.013246505419414691,\n", + " -0.05513460351603496,\n", + " -0.057782568952602004,\n", + " -0.001279822305349366,\n", + " -0.030784987098336172,\n", + " 0.030994219045135454,\n", + " 0.018162360788232388,\n", + " -0.03024641338877035,\n", + " -0.031773179238267994,\n", + " 0.008865744903631106,\n", + " -0.04380713296771668,\n", + " 0.016046460179058402,\n", + " 0.02755386853310774,\n", + " 0.046090490880531944,\n", + " -0.025823969163968317,\n", + " 0.11210275296662581,\n", + " 0.06176788692874207,\n", + " 0.035427250006001616,\n", + " 0.006006106480885448,\n", + " 0.01281462915736041,\n", + " 0.02282760583971931,\n", + " 0.007965040582793754,\n", + " 0.02477016428818374,\n", + " -0.022583787218077897,\n", + " -0.03815561790710107,\n", + " -0.031768981251201436,\n", + " -0.06439372450184458,\n", + " -0.007164075996404523,\n", + " 0.0012513848431303046,\n", + " -0.013000723788320257,\n", + " -0.028711128094931735,\n", + " 0.0064579982647404,\n", + " 0.013615733899120703,\n", + " -0.028596681223361908,\n", + " 0.009222382759170073,\n", + " 0.08239403898486847,\n", + " -0.007579147796320135,\n", + " -0.0009819386450386272,\n", + " -0.01604653025674393,\n", + " 0.011562596199921215,\n", + " -0.055603219673245115,\n", + " 0.016465241090758184,\n", + " -0.04548907083517552,\n", + " 0.02880679748381194,\n", + " 
-0.049001127504664524,\n", + " -0.06888689880205945,\n", + " -0.006270052414921115,\n", + " -0.037703259772934074,\n", + " -0.02935620653837389,\n", + " -0.02511659332660555,\n", + " 0.009819619834106836,\n", + " -0.02706750770481138,\n", + " 0.03534383420099138,\n", + " 0.02321938682029384,\n", + " 0.012501044862740385,\n", + " 0.023659324518958653,\n", + " -0.04127680460592744,\n", + " 0.0005257046517375156,\n", + " 0.06030849576833338,\n", + " 0.021986273169431657,\n", + " -0.0033540373284240417,\n", + " 0.0658876873204485,\n", + " 0.014131071850384078,\n", + " 0.01260874091727074,\n", + " -0.032050329810477264,\n", + " 0.012457927063943039,\n", + " 0.028062519070951888,\n", + " 0.011588052753448461,\n", + " -0.012108903482640264,\n", + " -0.0019009170780476619,\n", + " 0.0068592468240559696,\n", + " -0.029111258331182374,\n", + " -0.03894048131098142,\n", + " 0.017722980374019176,\n", + " 0.013040879970645762,\n", + " -0.014659244029197532,\n", + " -0.04112731221752689,\n", + " -0.047705651387745164,\n", + " 0.010998592633089303,\n", + " -0.03386130405622693,\n", + " 0.02523504463734857,\n", + " -0.0017746748389028393,\n", + " 0.059299363748558404,\n", + " -0.026496446313936713,\n", + " -0.030176579306622524,\n", + " -0.020962960429434616,\n", + " -0.020020809328896563,\n", + " 0.2183907932911652,\n", + " 0.08440069682488145,\n", + " 0.015938588930314218,\n", + " -0.018099833139059315,\n", + " 0.030925976727558976,\n", + " 0.023049999043267818,\n", + " 0.02842453705737167,\n", + " 0.020570456981292583,\n", + " 0.03094853173120198,\n", + " 0.031011177845033922,\n", + " 0.0027533948116737436,\n", + " -0.02075156274844916,\n", + " 0.0031692537341643356,\n", + " -0.015771096587830172,\n", + " -0.012238245199418045,\n", + " 0.010393395901037659,\n", + " -0.031878319125792795,\n", + " 0.005381986688095071,\n", + " -0.0553822413715378,\n", + " -0.03349025939650424,\n", + " -0.03179771310226909,\n", + " 0.022904018881725424,\n", + " -0.008520004962450346,\n", + " -0.010858477306419312,\n", + " 0.023655994160379615,\n", + " -0.03404780413665306,\n", + " -0.0033941555520032177,\n", + " -0.004095508879701135,\n", + " -0.014870448162289614,\n", + " -0.01945085583285982,\n", + " 0.02117800048155135,\n", + " -0.03506357685139024,\n", + " 0.052941539032509886,\n", + " 0.012844790926916157,\n", + " -0.030419782245741055,\n", + " 0.021329461719212052,\n", + " -0.014221662275579614,\n", + " -0.04174537070764793,\n", + " 0.013309597861362814,\n", + " 0.0026689895764844657,\n", + " -0.012453413727291588,\n", + " -0.030384032615022382,\n", + " -0.004265092707395013,\n", + " 0.00231762882094342,\n", + " -0.014363134106019523,\n", + " 0.06491976097976568,\n", + " 0.01716421761695518,\n", + " 0.010340994477423798,\n", + " 0.05009239390891432,\n", + " -0.06471981933184819,\n", + " 0.0359715800968789,\n", + " -0.031220092773731997,\n", + " 0.03650257206827681,\n", + " -0.027435080182067643,\n", + " -0.04055586873641445,\n", + " -0.03477177003180709,\n", + " -0.023628970870025856,\n", + " -0.029537077044729042,\n", + " -0.01642846532250138,\n", + " 0.01825142285247682,\n", + " -0.012776498553850012,\n", + " 0.0331190078407576,\n", + " -0.007710117985028348,\n", + " 0.00722714758189862,\n", + " 0.004677362234147552,\n", + " 0.02553129304738132,\n", + " -0.0015531325786786333,\n", + " -0.0032978283505630023,\n", + " -0.045433459186157814,\n", + " 0.011758890471158445,\n", + " -0.004907923242222132,\n", + " ...]],\n", + " {'id': 'b6c62336-83da-40c7-a80b-d9289eb5a65d'}),\n", + " 
('5f90161b-d63f-4539-b9ad-b5f9830965f3',\n", + " [[0.019948402701294755,\n", + " 0.007320638474416905,\n", + " -0.03162028520128323,\n", + " -0.033802843277329186,\n", + " 0.026864993455415466,\n", + " -0.01748547816788136,\n", + " 0.004478703086885245,\n", + " -0.028244337777621052,\n", + " -0.028507416622771873,\n", + " 0.03676196923369405,\n", + " 0.01877452823547576,\n", + " 0.006304441643173165,\n", + " 0.009876524392338817,\n", + " -0.04511359960977658,\n", + " 0.00988390794355125,\n", + " 0.002929455513469054,\n", + " 0.015589575110956325,\n", + " -0.04441865081990328,\n", + " -0.041506334815393925,\n", + " -0.012790419317407034,\n", + " 0.005603367861953193,\n", + " -0.018776043592405034,\n", + " -0.05583683138137712,\n", + " -0.017288022413912675,\n", + " -0.01079542056196113,\n", + " 0.03005266710328108,\n", + " 0.02571998026748768,\n", + " -0.005926487216578579,\n", + " 0.08445150612022345,\n", + " 0.048468467638838705,\n", + " 0.0058480895308802615,\n", + " -0.012218397502058099,\n", + " 0.01114361873475997,\n", + " -0.01733892586351336,\n", + " -0.02380030610812849,\n", + " -0.026625497564355234,\n", + " 0.004909375069068835,\n", + " -0.023106445527593666,\n", + " -0.009225408233911327,\n", + " -0.036832516387381255,\n", + " 0.013296155284796047,\n", + " -0.017593783812571366,\n", + " 0.006024022639871934,\n", + " -0.04516128080711491,\n", + " -0.06982265035212534,\n", + " 0.003918743529643628,\n", + " 0.036332085864274825,\n", + " -0.047807317749602674,\n", + " -0.008312517556012073,\n", + " -0.05054262514850781,\n", + " -0.02353012847682577,\n", + " 0.010917383911861003,\n", + " -0.013921833278664171,\n", + " 0.03947658709492724,\n", + " 0.001300981808382305,\n", + " -0.03100491468558839,\n", + " 0.028797961736023305,\n", + " 0.011256547574108357,\n", + " -0.008408040978550342,\n", + " -0.014869105947562735,\n", + " 0.049403792040405674,\n", + " 0.020435249253001636,\n", + " 0.005037673170293011,\n", + " -0.08633900352277235,\n", + " -0.020513418957148515,\n", + " 0.021429903098906897,\n", + " 0.01523513244718719,\n", + " -0.0022333197787526675,\n", + " -0.004918039639296144,\n", + " -0.007966602588787878,\n", + " -0.08720723863210146,\n", + " -0.02446973960957186,\n", + " -0.005403344984864388,\n", + " -0.0429423473846459,\n", + " 0.004863410089729265,\n", + " 0.01203993625563912,\n", + " -0.011045044258001688,\n", + " 0.019328450519179523,\n", + " 0.0010478032104179003,\n", + " -0.00781999943351621,\n", + " 0.013017555048815967,\n", + " 0.008565316890986628,\n", + " -0.048162996090330916,\n", + " 0.006060235263403255,\n", + " -0.024139524011339755,\n", + " -0.04758899108973897,\n", + " 0.0518901198547167,\n", + " -0.01984179039169686,\n", + " -0.0246993009290857,\n", + " -0.04949115050284516,\n", + " -0.030238300022147214,\n", + " 0.013771538347786287,\n", + " 0.00021846332166730917,\n", + " 0.07358136608792924,\n", + " -0.004310788741644312,\n", + " 0.043157572139385925,\n", + " -0.015236107089507471,\n", + " 0.033461338778483035,\n", + " 0.011331645883673754,\n", + " -0.011023445336669173,\n", + " -0.0034910882773200773,\n", + " 0.09149978715460046,\n", + " -0.04521714560988336,\n", + " 0.04343493330834665,\n", + " -0.043943801691401545,\n", + " -0.02862737729458233,\n", + " 0.03350815551045902,\n", + " -0.015435169732032033,\n", + " -0.030697571823825657,\n", + " -0.05289720505153839,\n", + " 0.0034574135376392655,\n", + " 0.0001748952208599547,\n", + " 0.01632608942948694,\n", + " 0.012033362081310051,\n", + " -0.032192452789222684,\n", + " 0.037489059184745065,\n", + 
" -0.03567959757896339,\n", + " 0.0006676827472058696,\n", + " -0.04925450734748064,\n", + " -0.008771480014693212,\n", + " 0.00851274807429085,\n", + " -0.015410198548271341,\n", + " 0.0066812510769201855,\n", + " 0.025976107794107233,\n", + " 0.017765383577055555,\n", + " -0.0386096300383103,\n", + " 0.008374015791391758,\n", + " 0.013606575473745686,\n", + " -0.01333777505441728,\n", + " 0.010563922670532638,\n", + " 0.030045412374357938,\n", + " 0.00462937134814755,\n", + " 0.0018764254657904923,\n", + " 0.051228264832949816,\n", + " -0.046499595230181795,\n", + " -0.02247519427468365,\n", + " -0.015008223849814635,\n", + " 0.008162180249381134,\n", + " -0.009227473628115263,\n", + " 0.019675236731886547,\n", + " -0.02316053054873378,\n", + " -0.04553155674725579,\n", + " -0.003791853998451026,\n", + " 0.10501517085528524,\n", + " -0.0056925967901325595,\n", + " -0.004059845040896067,\n", + " -0.005267007779528007,\n", + " -0.005129170472533452,\n", + " -0.046665067460774125,\n", + " 0.023022327962747884,\n", + " 0.013100894594835646,\n", + " 0.01483829369000084,\n", + " -0.008982578218600667,\n", + " 0.030186310058238176,\n", + " -0.052930753087717566,\n", + " -0.048650835929689426,\n", + " -0.006158499549649168,\n", + " 0.004124226946271197,\n", + " -0.01855226063057787,\n", + " 0.027615352779984458,\n", + " 0.01416675241860048,\n", + " -0.02357168383530236,\n", + " -0.03623349952230569,\n", + " 0.06190325265721145,\n", + " -0.004673782408622566,\n", + " 0.07518412555042547,\n", + " -0.012852826936447296,\n", + " -0.018272924751524745,\n", + " -0.0021370458816277822,\n", + " -0.013358156944122034,\n", + " 0.012014675221727528,\n", + " 0.039427349859936706,\n", + " -0.009524893783542461,\n", + " 0.0010205072928445706,\n", + " 0.025587145841898886,\n", + " 0.022818559916604014,\n", + " 0.054964673972345175,\n", + " 5.649217579244451e-07,\n", + " -0.001782250227835693,\n", + " -0.002632569292530359,\n", + " -0.015790520931946682,\n", + " -0.04274217788745152,\n", + " 0.013948996983887966,\n", + " 0.013507943365963249,\n", + " -0.005963940602159372,\n", + " -0.008879088154487918,\n", + " -0.001067278894643557,\n", + " 0.015623041785689632,\n", + " -0.0008184630923350069,\n", + " 0.024784786383240123,\n", + " -0.018840715771688592,\n", + " 0.059128142560976114,\n", + " -0.03425419928833682,\n", + " 0.009861876789537566,\n", + " -0.04011633214377976,\n", + " 0.034747290331013994,\n", + " -0.036670681691431235,\n", + " -0.017944226205252038,\n", + " 0.011045611245577574,\n", + " -0.02753872724827893,\n", + " -0.009149134420956162,\n", + " 0.06951932132169206,\n", + " 0.013249955543784548,\n", + " 0.005180948557677209,\n", + " -0.0635340818185318,\n", + " 0.03173262840772439,\n", + " 0.019600729987833805,\n", + " 0.05046521651288581,\n", + " -0.04239150666570435,\n", + " 0.007334304654777384,\n", + " -0.011354752534299992,\n", + " -0.0076735801890128056,\n", + " -0.026682777717275747,\n", + " 0.00789523588803657,\n", + " 0.025286144087823415,\n", + " -0.04089081852710857,\n", + " -0.03479763272564422,\n", + " -0.0030830674064173764,\n", + " 0.022509994938123154,\n", + " 0.01764198029906685,\n", + " 0.023575684106390823,\n", + " -0.008028163540282084,\n", + " 0.019027662338899454,\n", + " -0.022199180654670095,\n", + " 0.009707081553685163,\n", + " -0.013180093182266718,\n", + " 0.0501215728860254,\n", + " 0.034079923071289625,\n", + " 0.027562184770140472,\n", + " 0.03175225685653981,\n", + " 0.012722572346704536,\n", + " 0.013845780667139958,\n", + " -0.016054021839597938,\n", + " 
0.01880818136352251,\n", + " 0.011662868329798461,\n", + " 0.023950616294277475,\n", + " 0.024750384051879345,\n", + " 0.0494932353898955,\n", + " 0.024152377424756625,\n", + " 0.001159512475593342,\n", + " -0.020342334481821482,\n", + " -0.029944686904374546,\n", + " 0.05752189133642809,\n", + " 0.027321141316578642,\n", + " 0.008844893464317112,\n", + " 0.022118490440731463,\n", + " 0.008357565524055517,\n", + " -0.005531607066698466,\n", + " -0.05203859771330355,\n", + " -0.025494897217556767,\n", + " -0.029328841780245477,\n", + " 0.04318996077496149,\n", + " -0.0098791271110915,\n", + " -0.036562145522644605,\n", + " -0.03952087145190062,\n", + " -0.02036588353530962,\n", + " -0.000813748789807555,\n", + " 0.022444376932031388,\n", + " 0.021510017002603972,\n", + " -0.06530612732939538,\n", + " -0.01581511751405036,\n", + " 0.026220715896016114,\n", + " 0.014066229504716695,\n", + " -0.025525629808702918,\n", + " 0.042947483325916254,\n", + " 0.043273515589806695,\n", + " 0.007258602914488473,\n", + " 0.04006274885155582,\n", + " 0.0030426352172755297,\n", + " -0.021102301237123273,\n", + " -0.019519465158683736,\n", + " -0.07492236543870975,\n", + " -0.0601558528843251,\n", + " 0.005922111072560512,\n", + " -0.05263858074554879,\n", + " -0.07422978969100757,\n", + " 0.032372573470131324,\n", + " -0.024163112050520706,\n", + " 0.020967425995267314,\n", + " -0.0011968933986364091,\n", + " -0.020909420369454482,\n", + " 0.013592171955282038,\n", + " -0.02835400114143896,\n", + " 0.0657044661882399,\n", + " 0.05920431721466913,\n", + " 0.012411260571910604,\n", + " -0.05828141399383786,\n", + " 0.03995802650054421,\n", + " -0.0054076512088899135,\n", + " 0.03704476466922665,\n", + " 0.012164943032123456,\n", + " -0.050213229944854866,\n", + " -0.01953925294033065,\n", + " -0.0034337451964062734,\n", + " -0.010403762664343376,\n", + " -0.0051951190094990515,\n", + " -0.013831388166372104,\n", + " -0.016010688394523128,\n", + " -0.047186407875468385,\n", + " 0.015489441206485592,\n", + " -0.0011987325063190287,\n", + " -0.024028879225081075,\n", + " 0.011299562353520184,\n", + " 0.028093802152465812,\n", + " -0.015388566573851443,\n", + " 0.02361283577660986,\n", + " 0.02291667504019917,\n", + " -0.028193035995941643,\n", + " 0.027049931411931485,\n", + " 0.040623186831049436,\n", + " -0.013116352422035323,\n", + " 0.02246116959545232,\n", + " -0.020493609140110013,\n", + " 0.056909876980375265,\n", + " -0.016530343949275893,\n", + " 0.0666564086651252,\n", + " 0.04464433730055771,\n", + " 0.011051809970734568,\n", + " 0.013740944749110159,\n", + " -0.023054740328745164,\n", + " -0.014982637370119574,\n", + " 0.004136063341614728,\n", + " -0.020038388460423696,\n", + " -0.044038560655354685,\n", + " -0.010308009565223547,\n", + " 0.045289113198813,\n", + " -0.0138478960647325,\n", + " -0.05097309160828908,\n", + " -0.016907611888670977,\n", + " -0.04404215411921381,\n", + " -0.05687246766557765,\n", + " 0.01287096630130034,\n", + " 0.004505159540790541,\n", + " 0.05129089958602646,\n", + " 0.008466039824242683,\n", + " 0.036505599317767014,\n", + " -0.0036099217540992512,\n", + " 0.041166694849682224,\n", + " -0.03897079010645383,\n", + " 0.0008091556819338424,\n", + " 0.0032634648009934723,\n", + " -0.047084343331688434,\n", + " 0.012783996848273905,\n", + " 0.030969952994287276,\n", + " 0.017862663050800455,\n", + " 0.015429530366815374,\n", + " 0.06943027121919058,\n", + " -0.02843125722434996,\n", + " -0.004982010499866632,\n", + " 0.027369753085454075,\n", + " 0.02925001948950154,\n", 
+ " 0.03037071916535606,\n", + " -0.005207179148818718,\n", + " -0.016329454064279568,\n", + " 0.03653542506779777,\n", + " -0.02387390770609596,\n", + " -0.03136944786367516,\n", + " 0.0024378713525705395,\n", + " -0.014167975382833669,\n", + " 0.02668813740252224,\n", + " 0.031658986129033985,\n", + " 0.044371949349975605,\n", + " 0.010453832159123926,\n", + " 0.028282779365763105,\n", + " -0.009560915716185031,\n", + " -0.0033665525073309564,\n", + " 0.0022216872107813338,\n", + " 0.006260751394257628,\n", + " 0.03786382356464962,\n", + " -0.024574850122481458,\n", + " 0.019095244884902884,\n", + " -0.0027509152362706264,\n", + " -0.04027293936683282,\n", + " -0.014577123448767652,\n", + " -0.0148953789144573,\n", + " -0.07104484843169899,\n", + " 0.05129807973362423,\n", + " -0.007862953191843639,\n", + " 0.024357079432497847,\n", + " -0.014121933279623536,\n", + " -0.008616271191490983,\n", + " 0.03256198630617003,\n", + " 0.0033154549767812737,\n", + " 0.010306461155206888,\n", + " 0.04289656801110471,\n", + " 0.04068958116093718,\n", + " -0.00571901129204233,\n", + " 0.001813562731162412,\n", + " -0.018315531028677116,\n", + " 0.027980224964095673,\n", + " 0.003354139377988336,\n", + " 0.01797734709384041,\n", + " -0.0445602638064951,\n", + " 0.016564210651118048,\n", + " -0.042367979647607794,\n", + " -0.0038956724746499567,\n", + " 0.030645188612928264,\n", + " 0.02807814854928702,\n", + " 0.0012013203935581432,\n", + " -0.02081225954781814,\n", + " 0.04989480836621324,\n", + " -0.0338980802397768,\n", + " 0.03761506772403164,\n", + " 0.02425663872757499,\n", + " 0.011663206488307844,\n", + " -0.03652626512501724,\n", + " -0.07035570342496422,\n", + " 0.02849833126131672,\n", + " 0.007230152681401905,\n", + " -0.01767370617786463,\n", + " -0.013374386010027325,\n", + " 0.008082696896889528,\n", + " -0.039348954716783575,\n", + " 0.00785633325170127,\n", + " 0.017981735526826862,\n", + " 0.014012178384179026,\n", + " -0.02191075263404133,\n", + " -0.004679177265744093,\n", + " 0.03527805511318785,\n", + " -0.008349523453640597,\n", + " -0.003912197323311576,\n", + " 0.023321960132484592,\n", + " -0.047904758251209184,\n", + " 0.020673363694512253,\n", + " 0.041959236693873025,\n", + " -0.03426214558954983,\n", + " 0.007695646938659055,\n", + " -0.04242006114314344,\n", + " -0.016130169372808995,\n", + " 0.042354046500003066,\n", + " -0.014651969198844837,\n", + " -0.050366379306458584,\n", + " 0.0049666929364095705,\n", + " 0.001299341442981517,\n", + " -0.05396454856920536,\n", + " 0.07894082081032368,\n", + " 0.009990207520091594,\n", + " -0.00993794465633288,\n", + " 0.0029446847234809942,\n", + " -0.03350719951347009,\n", + " 0.0014784061201240093,\n", + " 0.012295099310119125,\n", + " 0.006934187709334642,\n", + " -0.03166361356126768,\n", + " 0.027474187281344906,\n", + " -0.032561938845326606,\n", + " -0.0058508422597987626,\n", + " 0.013338201354493021,\n", + " 0.030453795981706794,\n", + " 0.02152755208921844,\n", + " 0.033703426370600156,\n", + " -0.039480543295232526,\n", + " 0.04771122310191322,\n", + " -0.007508567311583599,\n", + " -0.0021638957944002,\n", + " -0.018305823591167104,\n", + " 0.017114675073372464,\n", + " -0.008779450893842997,\n", + " 0.02980513168435111,\n", + " 0.012933172211756057,\n", + " 0.0001476079373463101,\n", + " -0.024522027898782408,\n", + " 0.014773228263728922,\n", + " -0.0074956537245973905,\n", + " -0.07305436766268698,\n", + " 0.01641090873680333,\n", + " 0.012204977948580404,\n", + " 0.002429736691135193,\n", + " 
0.017177918342262986,\n", + " 0.020260493342429853,\n", + " -0.034806596044930575,\n", + " 0.013446185790974861,\n", + " 0.02635603862582434,\n", + " -0.02347111939318058,\n", + " 0.019236537510801604,\n", + " 0.015358101797464606,\n", + " 0.015518573689196357,\n", + " 0.0036542739122775196,\n", + " -0.03466709845593129,\n", + " 0.011275693786854004,\n", + " -0.03134855153232831,\n", + " 0.017550713097165504,\n", + " -0.013073626645259333,\n", + " -0.016284632382757444,\n", + " 0.014650331799746763,\n", + " -0.043518423712047206,\n", + " -0.03474649705691679,\n", + " 0.0013311342754691226,\n", + " -0.017109066218698003,\n", + " 0.0007250415071483041,\n", + " 0.02540110781083354,\n", + " -0.006012900699724922,\n", + " 0.018689337716562605,\n", + " -0.0011025120026429936,\n", + " 0.007734942821982713,\n", + " -0.02392530101940197,\n", + " -0.05367391192432732,\n", + " 0.0314581216694895,\n", + " 0.025000439980601072,\n", + " 0.026461357627438974,\n", + " 0.05855638856038649,\n", + " -0.012716468543234386,\n", + " -0.014080277066854675,\n", + " 0.034466298407531445,\n", + " -0.02560315709643347,\n", + " -0.017891888760167944,\n", + " -0.026846317613528736,\n", + " -0.04968168205876454,\n", + " -0.033439211855267444,\n", + " -0.01029176015895679,\n", + " -0.02101693613010753,\n", + " -0.0190395834857491,\n", + " 0.00219060290088482,\n", + " 0.008655734035281679,\n", + " 0.010567627158864772,\n", + " 0.0024119556132742356,\n", + " 0.0010355555583909627,\n", + " -0.033853606039429725,\n", + " 0.005219324039644495,\n", + " 0.04737189163168376,\n", + " -0.01200797307262423,\n", + " -0.0889573368929424,\n", + " -0.03781682715948067,\n", + " 0.026015278245201844,\n", + " -0.02709490395113449,\n", + " -0.019596570383913853,\n", + " 0.018392341318665965,\n", + " -0.006636119204885691,\n", + " -0.027521048084104074,\n", + " -0.05340229351742071,\n", + " 0.02828069278368264,\n", + " -0.053783465111187284,\n", + " 0.028987035566037565,\n", + " -0.031073271860357677,\n", + " -0.0017689296217413839,\n", + " -0.0017239297501776582,\n", + " -0.00807700498573908,\n", + " 0.03611578985049755,\n", + " -0.020009379714911856,\n", + " 0.0016034297409701845,\n", + " -0.02719262582774129,\n", + " 0.011621933352346608,\n", + " -0.04104949029685051,\n", + " -0.019315073341454884,\n", + " -0.031175292333354455,\n", + " 0.009006045063127879,\n", + " -0.003187319810327598,\n", + " 0.03711572202020343,\n", + " 0.014049393618027645,\n", + " -0.02581612068099044,\n", + " -0.007692083137827067,\n", + " 0.029064220457683428,\n", + " 0.023395814289940274,\n", + " 0.06822526074505635,\n", + " -0.046489730154870415,\n", + " -0.02588126407864798,\n", + " 0.01581530566239393,\n", + " -0.022425270552493614,\n", + " 0.04751716249327936,\n", + " 0.010414700693722126,\n", + " -0.01631577178113293,\n", + " 0.025004872484370704,\n", + " 0.006070820726516581,\n", + " -0.008595917269783246,\n", + " -0.04597909555042816,\n", + " -0.01682714033361811,\n", + " 0.0004912805386278698,\n", + " -0.040300680229813236,\n", + " 0.007200449821055008,\n", + " -0.03285695544804048,\n", + " -0.009160270768859214,\n", + " 0.007560175893715134,\n", + " 0.02207086856944741,\n", + " 0.011052033714710703,\n", + " -0.01631012563579578,\n", + " 0.0011530821669458974,\n", + " -0.015351357272608251,\n", + " -0.04379888000601113,\n", + " -0.032308769146270344,\n", + " 0.00908675646494304,\n", + " -0.05592811214351944,\n", + " 0.004701399958161589,\n", + " -0.0031394589397963136,\n", + " 0.0026401157785132918,\n", + " 0.05057638675848239,\n", + " 
-0.015506816960268571,\n", + " 0.017202501364125686,\n", + " 0.06369814717412269,\n", + " 0.018266303963867313,\n", + " -0.03229645983752271,\n", + " -0.007934782635818316,\n", + " 0.016250430064951086,\n", + " 0.004985990006836098,\n", + " 0.0070927993055072466,\n", + " 0.0021089753352885932,\n", + " -0.0426641115801421,\n", + " -0.036406416325194854,\n", + " -0.0046755986333985354,\n", + " -0.052224983225543954,\n", + " -0.06879242460407498,\n", + " 0.01582516395758482,\n", + " 0.01140119551213403,\n", + " 0.07571528696964794,\n", + " 0.005627956816421321,\n", + " 0.01775034187975089,\n", + " -0.015477603116111937,\n", + " -0.0036501867708952926,\n", + " -0.05088400760518516,\n", + " 0.0682561306336424,\n", + " -0.005973734486205616,\n", + " -0.00601638822420141,\n", + " 0.04892507146310476,\n", + " 0.00199247994068427,\n", + " -0.023615432562757117,\n", + " 0.03805964361455035,\n", + " -0.004322124255586725,\n", + " -0.0636519406529907,\n", + " -0.010979263529018188,\n", + " 0.05459178090569557,\n", + " 0.014672346850974285,\n", + " -0.034614274537202123,\n", + " 0.028362855978797494,\n", + " 0.006778891592081118,\n", + " 0.03755577218029579,\n", + " -0.033751304191432664,\n", + " -0.014018920366490195,\n", + " 0.040525088657755504,\n", + " -0.009198984409335883,\n", + " -0.05477262366943628,\n", + " -0.018498253580823378,\n", + " -0.023554804725345196,\n", + " -0.007548629348522504,\n", + " 0.0015443228752748176,\n", + " 0.022554918321452615,\n", + " -0.014215137053439533,\n", + " -0.016513837745945618,\n", + " 0.06763763448240216,\n", + " -0.026361457637125107,\n", + " -0.04221085035527708,\n", + " 0.029505616471692836,\n", + " 0.03716489484404933,\n", + " -0.007265048266528252,\n", + " 0.009777609214525966,\n", + " -0.011703470233831295,\n", + " 0.00873025094951515,\n", + " -0.0316613659513256,\n", + " -0.028650012726834588,\n", + " 0.011195885836093992,\n", + " -0.014384174779893974,\n", + " -0.019280267592925015,\n", + " 0.029520468325623814,\n", + " 0.02690383168060612,\n", + " 0.07055303883179423,\n", + " -0.03224239515674406,\n", + " -0.002914253635817835,\n", + " 0.010913887064718842,\n", + " 0.004313788521203128,\n", + " -0.026417946210978545,\n", + " -0.008531441713993863,\n", + " 0.05165034767374654,\n", + " 0.006425235846046228,\n", + " 0.0357542161950042,\n", + " -0.030996737860278754,\n", + " -0.00016618292494076682,\n", + " 0.028126119596776248,\n", + " 0.0294878017051082,\n", + " -0.05476023299924278,\n", + " -0.06197813230789108,\n", + " 0.013486789390037819,\n", + " -0.04681314529225213,\n", + " -0.01236191824505249,\n", + " -0.01086744239185468,\n", + " -0.03522129533451486,\n", + " -0.0018320300843441071,\n", + " 0.027630357186626438,\n", + " -0.020645733008489785,\n", + " -0.02121816671615842,\n", + " 0.014755194838258517,\n", + " 0.032039208505932115,\n", + " -0.05426537912523848,\n", + " -0.026496765111662237,\n", + " -0.0027276865434755874,\n", + " 0.004843784607216556,\n", + " -0.022407167630788198,\n", + " 0.011862649665094847,\n", + " 0.018321921292237923,\n", + " -0.039641876262266375,\n", + " -0.012789214150990129,\n", + " 0.032844324083569625,\n", + " 0.018835695092466548,\n", + " 0.032525248223300815,\n", + " 0.0078897711109225,\n", + " 0.007483330008093717,\n", + " -0.014339487005751505,\n", + " 0.010730286486969262,\n", + " 0.003666385326258383,\n", + " 0.028225661935734322,\n", + " -0.053684129565904126,\n", + " 0.04538796057542096,\n", + " 0.010088388749861519,\n", + " 0.0040483870610273235,\n", + " 0.03409528343425727,\n", + " 
-0.043360419784173425,\n", + " 0.0003083255554760944,\n", + " 0.017370024581165915,\n", + " 0.008353494909216982,\n", + " -0.028789011976977932,\n", + " -0.003687736349435522,\n", + " 0.024040000317713025,\n", + " -0.013106698377974162,\n", + " 0.040416501638065205,\n", + " -0.0245350050493982,\n", + " 0.058005219005601225,\n", + " -0.04773633327814393,\n", + " -0.044307819580331156,\n", + " 0.02314849922492619,\n", + " -0.013815300635482019,\n", + " -0.06400056088828997,\n", + " 0.022913290065045076,\n", + " 0.06192669153374165,\n", + " 0.0705363461751505,\n", + " 0.030157704729895426,\n", + " 0.026374473773433712,\n", + " -0.009771714747275912,\n", + " 0.02185730324919705,\n", + " 0.008162090412784656,\n", + " 0.019042141286203542,\n", + " 0.02262143299847904,\n", + " -0.024765210480358492,\n", + " -0.0017125796166004414,\n", + " -0.058390845138529025,\n", + " -0.007678330510930355,\n", + " 0.023032055740619364,\n", + " -0.053444713341259636,\n", + " 0.006010531047614045,\n", + " 0.009327897382736898,\n", + " -0.004909227177690671,\n", + " -0.010393567058158163,\n", + " 0.02630371135092098,\n", + " 0.041894696726939,\n", + " 0.04128038391004043,\n", + " 0.02873081481276129,\n", + " -0.0026144420047669362,\n", + " -0.02735077891826583,\n", + " -0.007071227504656687,\n", + " -0.03907892624813161,\n", + " -0.018820763572119825,\n", + " -0.03508336056328828,\n", + " -0.03473733372407602,\n", + " 0.007710349629939278,\n", + " -0.054389333288016914,\n", + " 0.046744415210855955,\n", + " 0.0018591181488486982,\n", + " -0.02170397760446062,\n", + " 0.01959886375966923,\n", + " 0.05004605251395957,\n", + " 0.032062332106859576,\n", + " -0.0024457869313625883,\n", + " 0.023693041211933535,\n", + " 0.01550873912442718,\n", + " 0.021409400014548403,\n", + " -0.06867499969732736,\n", + " -0.028148632986859705,\n", + " -0.013163580198815954,\n", + " 0.018809557727981763,\n", + " -0.07973614928403543,\n", + " -0.043777770100868876,\n", + " 0.012019841673540085,\n", + " 0.02883028596045423,\n", + " -0.006836099282463909,\n", + " -0.0420702543867586,\n", + " -0.0765059575005838,\n", + " -0.01386656427898368,\n", + " 0.03249760906212774,\n", + " 0.03298717783221092,\n", + " -0.039343788264971015,\n", + " 0.02164422440259177,\n", + " -0.024460367788026047,\n", + " 0.03505615532982649,\n", + " -0.04843355001832077,\n", + " -0.035549436215877354,\n", + " 0.019198051851876376,\n", + " 0.051023840810148643,\n", + " 0.01659250239888819,\n", + " 0.006810030142941543,\n", + " -0.014457201762650007,\n", + " 0.03907107486860544,\n", + " 0.04713017355613328,\n", + " -0.04078308919266012,\n", + " 0.03986024021279277,\n", + " -0.015176301341704833,\n", + " 0.003233631423327164,\n", + " 0.014212825032352812,\n", + " -0.020583067744870943,\n", + " -0.006952896180551224,\n", + " 0.024954362281758374,\n", + " 0.021552124940900405,\n", + " 0.012476430242535039,\n", + " -0.009802608366283674,\n", + " 0.08314605494116636,\n", + " 0.014927330232261334,\n", + " -0.019982852493498956,\n", + " 0.033552182222853826,\n", + " 0.05956954196504577,\n", + " 0.004457610132044216,\n", + " 0.006817543363958328,\n", + " 0.015435178207182645,\n", + " -0.030253175606499903,\n", + " 0.015037891962044025,\n", + " -0.04643086853884586,\n", + " 0.01428837337241566,\n", + " 0.0061461105744857894,\n", + " 0.012306328884678898,\n", + " -0.01626075618845571,\n", + " 0.016623514670004506,\n", + " -0.0448680236456679,\n", + " 0.02201456644890742,\n", + " 0.025830475891095606,\n", + " -0.03406291852910341,\n", + " -0.025755318255475928,\n", + " 
0.039791842357300544,\n", + " 0.013676666664330017,\n", + " -0.0011727294729714298,\n", + " -0.00013521986266768393,\n", + " -0.004786008234227757,\n", + " 0.011594523020240027,\n", + " 0.04044139146038003,\n", + " 0.04964390661746052,\n", + " 0.0013568563575739618,\n", + " 0.014583191656605234,\n", + " 0.009491078780119035,\n", + " -0.00355425696488522,\n", + " 0.02800541311171201,\n", + " 0.059762965242353666,\n", + " -0.027893966576205513,\n", + " 0.035286662476148534,\n", + " -0.024354348738970934,\n", + " -0.05188175318603337,\n", + " 0.04933725532798766,\n", + " -0.01568853944464272,\n", + " -0.02533690684992396,\n", + " 0.018223623105389563,\n", + " -0.008597604672269926,\n", + " -0.04839881885111628,\n", + " -0.04281984077759185,\n", + " 0.010786591997569496,\n", + " -0.0448623351245777,\n", + " 0.04651471489887209,\n", + " 0.022197289001053687,\n", + " -0.03116689854418917,\n", + " -0.04347976346501939,\n", + " 0.024125273893102168,\n", + " -0.061761188792607113,\n", + " 0.013932645875813875,\n", + " 0.01868531202002231,\n", + " 0.012175557310748859,\n", + " -0.023748883979310402,\n", + " 0.12668919558245592,\n", + " 0.045007819559968694,\n", + " 0.05355559543173543,\n", + " 0.032395463156901914,\n", + " 0.03065693517167532,\n", + " -0.010767365270893042,\n", + " -0.001515027881551083,\n", + " 0.018971146644564063,\n", + " -0.040644300126251934,\n", + " -0.025469175135451928,\n", + " -0.03467675673756776,\n", + " -0.04794770014432576,\n", + " -0.000829165724405532,\n", + " 0.004860955686112276,\n", + " -0.025678845276481402,\n", + " -0.039211416582605595,\n", + " 0.007419254479413182,\n", + " -0.003115551387437321,\n", + " -0.018392002312641518,\n", + " 0.012975769166242756,\n", + " 0.08243908499766621,\n", + " 0.0013311546158305894,\n", + " 0.00540275341935173,\n", + " 0.001910841357392249,\n", + " -0.007568839192669851,\n", + " -0.04557916336326874,\n", + " -0.0033894263031941534,\n", + " -0.030349938096057523,\n", + " 0.011538325144052555,\n", + " -0.021149438329792363,\n", + " -0.03652979756779197,\n", + " -0.013357356889904342,\n", + " -0.03146740365443883,\n", + " -0.045866935407240204,\n", + " -0.026850639940340423,\n", + " -0.007883191851503064,\n", + " -0.018495321178711922,\n", + " 0.032956002838202866,\n", + " 0.020311905301067208,\n", + " 0.020070249941631254,\n", + " 0.02782798413863747,\n", + " -0.02248269478297452,\n", + " -0.0178115104317718,\n", + " 0.054211660230604766,\n", + " 0.030674926221392666,\n", + " -0.01758925638711489,\n", + " 0.06150383575920919,\n", + " 0.058273745677564895,\n", + " 0.02838099110607523,\n", + " -0.026440410445188457,\n", + " 0.012853404941718976,\n", + " 0.004943482041430815,\n", + " 0.0014961306263546372,\n", + " 0.00019990337746518723,\n", + " 0.005428916209288349,\n", + " -0.004327090693844858,\n", + " -0.012575560689173412,\n", + " -0.035283577521326076,\n", + " -0.019853804765203226,\n", + " -0.00020916481017428632,\n", + " -0.015482422086749437,\n", + " -0.026525175511540933,\n", + " -0.02331329344346963,\n", + " -0.0037800472661346333,\n", + " -0.0036956453599858467,\n", + " 0.016040534484915353,\n", + " -0.027752567163409094,\n", + " 0.02860119246925412,\n", + " 0.005209672538128518,\n", + " -0.01977768096241388,\n", + " -0.004815014860951947,\n", + " -0.02320975761354358,\n", + " 0.22625603789597798,\n", + " 0.1080765105180808,\n", + " 0.018360615439868187,\n", + " -0.013923709677009479,\n", + " 0.038721291842642315,\n", + " 0.05187911571916318,\n", + " 0.026480175851855973,\n", + " 0.009770661286054945,\n", + " 
0.015192817715215842,\n", + " 0.010600431924335348,\n", + " -0.011741167703749689,\n", + " -0.009124204765433465,\n", + " -0.003990208118384495,\n", + " -0.023278101228071894,\n", + " -0.018882935581973098,\n", + " 0.0043045709473984416,\n", + " -0.04958564165203899,\n", + " 0.009578577930058667,\n", + " -0.06266092434172774,\n", + " -0.043282933177165805,\n", + " -0.022160944165172832,\n", + " 0.01612983036678455,\n", + " -0.00963866844292184,\n", + " -0.02173835112030932,\n", + " 0.015593916083099355,\n", + " -0.0027184337977958647,\n", + " -0.030006250398413935,\n", + " -0.006223276820800307,\n", + " -0.019988917311276295,\n", + " -0.02153527464645532,\n", + " 0.0084453774070527,\n", + " -0.004699086242044745,\n", + " 0.04781422669238088,\n", + " 0.0063258524161621124,\n", + " 0.005355343426832958,\n", + " 0.00792289708460124,\n", + " -0.0036670370653403804,\n", + " -0.050161714589380055,\n", + " 0.02067406035189249,\n", + " 8.72555952989435e-05,\n", + " -0.001589603909960068,\n", + " -0.05461597237560003,\n", + " -0.0038793014499719294,\n", + " -0.008407275672450155,\n", + " -0.012750758155092037,\n", + " 0.05850299511153625,\n", + " 0.026512962819510266,\n", + " -0.00776473806147127,\n", + " 0.024177916443608265,\n", + " -0.08407514163188352,\n", + " 0.028871678596039084,\n", + " -0.039934648645098415,\n", + " 0.01790690503202078,\n", + " -0.01680198947666446,\n", + " -0.022835635650055357,\n", + " -0.012443071202214493,\n", + " -0.05254496084183779,\n", + " -0.04100417197150257,\n", + " 0.004612264256638945,\n", + " 0.010854142338000604,\n", + " 0.005117680710849917,\n", + " 0.02539349034546424,\n", + " -0.006606279047098894,\n", + " 0.001183319597417592,\n", + " 0.021601570664595976,\n", + " 0.04066708472115495,\n", + " 0.0042936337655347535,\n", + " 0.002825332567484418,\n", + " -0.04523503495779337,\n", + " 0.01035076924260198,\n", + " 0.00785323134657759,\n", + " ...]],\n", + " {'id': '5f90161b-d63f-4539-b9ad-b5f9830965f3'}),\n", + " ('6795a8d8-ec0d-4fb4-9c37-68d29b168889',\n", + " [[-0.007365134114790748,\n", + " -0.01747265887203649,\n", + " 0.008983527094900778,\n", + " 0.006190118394871476,\n", + " 0.024439953332427183,\n", + " 0.001786791884979593,\n", + " -0.01594509182357385,\n", + " -0.03004102324321149,\n", + " -0.010005454354522563,\n", + " 0.039394199119379254,\n", + " -0.004494658994118949,\n", + " -0.0038209438314753125,\n", + " 0.012278908666674603,\n", + " -0.036251748470398144,\n", + " -0.009040616833247177,\n", + " -0.02097737535345808,\n", + " -0.061039837447779105,\n", + " -0.015258006823576828,\n", + " -0.06736195466451533,\n", + " -0.05812469673774007,\n", + " 0.020239095297866685,\n", + " -0.007510888740895568,\n", + " -0.02154076839154778,\n", + " -0.028325208143460213,\n", + " -0.016980024267560036,\n", + " 0.03686807225670831,\n", + " 0.018482781939094948,\n", + " 0.0070111306611298765,\n", + " 0.03409109749850018,\n", + " 0.0637752741994353,\n", + " 0.0547171308554368,\n", + " 0.0034009734010742266,\n", + " -0.004412580564096741,\n", + " -0.04202860058238772,\n", + " -4.204849756924023e-05,\n", + " -0.03015411478443017,\n", + " 0.06315878731547313,\n", + " -0.020861892948931707,\n", + " 0.01853612228482781,\n", + " -0.012606052902463317,\n", + " 0.054093085878010476,\n", + " -0.020210742253772822,\n", + " 0.0176439299404581,\n", + " -0.03744341034442442,\n", + " -0.043832827583198584,\n", + " -0.006711531394704672,\n", + " 0.040152818193527715,\n", + " -0.031235609754679275,\n", + " -0.0024494137430501775,\n", + " 0.0007312077799912074,\n", + " 
0.01575208057949392,\n", + " -0.0008722823259221393,\n", + " 0.013503660830232312,\n", + " 0.027502658496493495,\n", + " 0.016414510959994365,\n", + " -0.041579144227895236,\n", + " 0.003971306429594668,\n", + " -0.011756881270572802,\n", + " -0.02556271015524928,\n", + " 0.00034225648427942205,\n", + " 0.006494625417915883,\n", + " 0.04994376299471641,\n", + " -0.010470478194559985,\n", + " -0.07022824762778107,\n", + " -0.02995169577124183,\n", + " 0.04177551380090039,\n", + " 0.020494615570457116,\n", + " 0.00536861220220106,\n", + " -0.010588411697322345,\n", + " -0.018717774148122587,\n", + " -0.06961414048683215,\n", + " 0.019966479425935043,\n", + " -0.008322554399665059,\n", + " -0.011022741671283256,\n", + " -0.01975171132575511,\n", + " 0.011286429800150436,\n", + " -0.04089580213367332,\n", + " -0.01370491591926776,\n", + " 0.061594502908104524,\n", + " 0.007380549622978138,\n", + " -0.010884267131281239,\n", + " 0.022912853336331626,\n", + " -0.04784031852425661,\n", + " 0.007557216350938136,\n", + " -0.006919699136780326,\n", + " -0.05140560724957123,\n", + " -0.002336937293118506,\n", + " 0.011439182798231882,\n", + " -0.05142504923104231,\n", + " -0.03729108455098827,\n", + " -0.0010883472725246154,\n", + " 0.01588976094513365,\n", + " -0.06195989354328668,\n", + " -0.000749327141440621,\n", + " 0.03975215398433625,\n", + " 0.039036370284426074,\n", + " 0.0019139226821105323,\n", + " -0.0038353158853663997,\n", + " 0.008695953500236785,\n", + " -0.04804551019752891,\n", + " 0.03202469843563275,\n", + " 0.04460735979871127,\n", + " -0.02540962447502541,\n", + " 0.043196835702763434,\n", + " -0.015949541794738013,\n", + " -0.004584036044068052,\n", + " 0.011216729647892764,\n", + " -0.013601500887606805,\n", + " -0.03810636152538182,\n", + " -0.03509625720746716,\n", + " 0.021706247639941063,\n", + " 0.011406945064608622,\n", + " -0.00628798347290849,\n", + " -0.02446667910676589,\n", + " -0.034682993704658875,\n", + " 0.050231856463146145,\n", + " -0.0011266410490058932,\n", + " 0.029655228721087945,\n", + " -0.0677585710865254,\n", + " 0.03323794850795981,\n", + " -0.011028920848235075,\n", + " -0.0010226797328798652,\n", + " 0.02114864086173246,\n", + " 0.021683495517487394,\n", + " 0.050922104260224234,\n", + " -0.016671782678961366,\n", + " 0.027819783346978808,\n", + " 0.00980837771612835,\n", + " 0.0021065143667476113,\n", + " 0.0032679533655020853,\n", + " 0.01909109911339795,\n", + " 0.005049007526052418,\n", + " 0.01922667032985617,\n", + " 0.047514401227622696,\n", + " -0.019018316786194006,\n", + " 0.0025174694817654076,\n", + " -0.010861595630962364,\n", + " 0.011763824967768361,\n", + " 0.011476709158559407,\n", + " -0.04785298824640497,\n", + " -0.03563853095300557,\n", + " -0.001251573850436083,\n", + " 0.02291588361657045,\n", + " 0.09083775622715848,\n", + " 0.045629607693489824,\n", + " -0.010127647856752155,\n", + " -0.03961975205179753,\n", + " 0.01406541733165481,\n", + " -0.0034691565598300796,\n", + " -0.005489084838863838,\n", + " 0.019820371730481297,\n", + " 0.03467171772608213,\n", + " 0.019909317405674688,\n", + " -0.0008385257454146511,\n", + " -0.06265245065484647,\n", + " -0.008239497847076466,\n", + " -0.016922723243619004,\n", + " 0.018854670533003288,\n", + " -0.021931044392336445,\n", + " -0.0020799329245640135,\n", + " 0.0252664692177493,\n", + " 0.002194746721386316,\n", + " -0.032967198992114095,\n", + " 0.028061004774283315,\n", + " -0.00574664336875864,\n", + " 0.046441337293952424,\n", + " -0.041358602841511063,\n", + " 
...]],
+ "  {'id': '6795a8d8-ec0d-4fb4-9c37-68d29b168889'}),\n",
+ " ('b233b2e0-14a6-491e-8b5e-1bb6eb51926b',\n",
+ "  [[0.013624068466878676,\n",
+ "   ...]],\n",
+ "  {'id': 'b233b2e0-14a6-491e-8b5e-1bb6eb51926b'})]"
+ ]
+ },
+ "execution_count": 1,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "# test binary and int8 quant and rescore\n",
+ "\n",
+ "from vlite import VLite\n",
+ "from vlite.model import EmbeddingModel\n",
+ "from vlite.utils import process_pdf\n",
+ "\n",
+ "vlite = VLite()\n",
+ "\n",
+ "# Add a single text to the collection\n",
+ "text1 = process_pdf(\"data/attention2.pdf\")\n",
+ "vlite.add(text1)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Retrieving similar texts...\n",
+ "Retrieving top 5 similar texts for query: What is attention?\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "llama_print_timings: load time = 610.02 ms\n",
+ "llama_print_timings: sample time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n",
+ "llama_print_timings: prompt eval time = 138.28 ms / 6 tokens ( 23.05 ms per token, 43.39 tokens per second)\n",
+ "llama_print_timings: eval time = 0.00 ms / 1 runs ( 0.00 ms per token, inf tokens per second)\n",
+ 
"llama_print_timings: total time = 138.39 ms / 7 tokens\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Retrieval completed.\n" + ] + } + ], + "source": [ + "query = \"What is attention?\"\n", + "results = vlite.retrieve(query, top_k=5)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Results for query: What is attention?\n", + "[('block, computing hidden representations in parallel for all input and output positions. In these models,\\nthe number of operations required to relate signals from two arbitrary input or output positions grows\\nin the distance between positions, linearly for ConvS2S and logarithmically for ByteNet. This makes\\nit more difficult to learn dependencies between distant positions [ 11]. In the Transformer this is\\nreduced to a constant number of operations, albeit at the cost of reduced effective resolution due\\nto averaging attention-weighted positions, an effect we counteract with Multi-Head Attention as\\ndescribed in section 3.2.\\nSelf-attention, sometimes called intra-attention is an attention mechanism relating different positions\\nof a single sequence in order to compute a representation of the sequence. Self-attention has been\\nused successfully in a variety of tasks including reading comprehension, abstractive summarization,\\ntextual entailment and learning task-independent sentence representations [4, 22, 23, 19].\\nEnd-to-end memory networks are based on a recurrent attention mechanism instead of sequence-\\naligned recurrence and have been shown to perform well on simple-language question answering and\\nlanguage modeling tasks [28].\\nTo the best of our knowledge, however, the Transformer is the first transduction model relying\\nentirely on self-attention to compute representations of its input and output without using sequence-\\naligned RNNs or convolution. In the following sections, we will describe the Transformer, motivate\\nself-attention and discuss its advantages over models such as [14, 15] and [8].\\n3 Model Architecture\\nMost competitive neural sequence transduction models have an encoder-decoder structure [ 5,2,29].\\nHere, the encoder maps an input sequence of symbol representations (x1,. . . ,x n)to a sequence\\nof continuous representations z=( z1,. . . ,z n). Given z, the decoder then generates an output\\nsequence (y1,. . . ,y m)of symbols one element at a time. At each step the model is auto-regressive\\n[9], consuming the previously generated symbols as additional input when generating the next.\\nThe Transformer follows this overall architecture using stacked self-attention and point-wise, fully\\nconnected layers for both the encoder and decoder, shown in the left and right halves of Figure 1,\\nrespectively.\\n3.1 Encoder and Decoder Stacks\\nEncoder: The encoder is composed of a stack of N =6 identical layers. Each layer has two', 0, {'id': '6795a8d8-ec0d-4fb4-9c37-68d29b168889'}), (' the first Transformer models and\\nhas been crucially involved in every aspect of this work. Noam proposed scaled dot-product attention, multi-head\\nattention and the parameter-free position representation and became the other person involved in nearly every\\ndetail. Niki designed, implemented, tuned and evaluated countless model variants in our original codebase and\\ntensor2tensor. 
Llion also experimented with novel model variants, was responsible for our initial codebase, and\\nefficient inference and visualizations. Lukasz and Aidan spent countless long days designing various parts of and\\nimplementing tensor2tensor, replacing our earlier codebase, greatly improving results and massively accelerating\\nour research.\\n†Work performed while at Google Brain.\\n‡Work performed while at Google Research.\\n31st Conference on Neural Information Processing Systems (NIPS 2017), Long Beach, CA, USA.Recurrent models typically factor computation along the symbol positions of the input and output\\nsequences. Aligning the positions to steps in computation time, they generate a sequence of hidden\\nstates ht, as a function of the previous hidden state ht\\x001and the input for position t. This inherently\\nsequential nature precludes parallelization within training examples, which becomes critical at longer\\nsequence lengths, as memory constraints limit batching across examples. Recent work has achieved\\nsignificant improvements in computational efficiency through factorization tricks [ 18] and conditional\\ncomputation [ 26], while also improving model performance in case of the latter. The fundamental\\nconstraint of sequential computation, however, remains.\\nAttention mechanisms have become an integral part of compelling sequence modeling and transduc-\\ntion models in various tasks, allowing modeling of dependencies without regard to their distance in\\nthe input or output sequences [ 2,16]. In all but a few cases [ 22], however, such attention mechanisms\\nare used in conjunction with a recurrent network.\\nIn this work we propose the Transformer, a model architecture eschewing recurrence and instead\\nrelying entirely on an attention mechanism to draw global dependencies between input and output.\\nThe Transformer allows for significantly more parallelization and can reach a new state of the art in\\ntranslation quality after being trained for as little as twelve hours on eight P100 GPUs.\\n2 Background\\nThe goal of reducing sequential computation also forms the foundation of the Extended Neural GPU\\n[20], ByteNet [ 15] and ConvS2S [ 8], all of which use convolutional neural networks as basic building\\n', 0, {'id': '5f90161b-d63f-4539-b9ad-b5f9830965f3'}), ('Attention Is All You Need\\nAshish Vaswani⇤\\nGoogle Brain\\navaswani@google.comNoam Shazeer⇤\\nGoogle Brain\\nnoam@google.comNiki Parmar⇤\\nGoogle Research\\nnikip@google.comJakob Uszkoreit⇤\\nGoogle Research\\nusz@google.com\\nLlion Jones⇤\\nGoogle Research\\nllion@google.comAidan N. Gomez⇤†\\nUniversity of Toronto\\naidan@cs.toronto.eduŁukasz Kaiser⇤\\nGoogle Brain\\nlukaszkaiser@google.com\\nIllia Polosukhin⇤‡\\nillia.polosukhin@gmail.com\\nAbstract\\nThe dominant sequence transduction models are based on complex recurrent or\\nconvolutional neural networks that include an encoder and a decoder. The best\\nperforming models also connect the encoder and decoder through an attention\\nmechanism. We propose a new simple network architecture, the Transformer,\\nbased solely on attention mechanisms, dispensing with recurrence and convolutions\\nentirely. Experiments on two machine translation tasks show these models to\\nbe superior in quality while being more parallelizable and requiring significantly\\nless time to train. Our model achieves 28.4 BLEU on the WMT 2014 English-\\nto-German translation task, improving over the existing best results, including\\nensembles, by over 2 BLEU. 
On the WMT 2014 English-to-French translation task,\\nour model establishes a new single-model state-of-the-art BLEU score of 41.0 after\\ntraining for 3.5 days on eight GPUs, a small fraction of the training costs of the\\nbest models from the literature.\\n1 Introduction\\nRecurrent neural networks, long short-term memory [ 12] and gated recurrent [ 7] neural networks\\nin particular, have been firmly established as state of the art approaches in sequence modeling and\\ntransduction problems such as language modeling and machine translation [ 29,2,5]. Numerous\\nefforts have since continued to push the boundaries of recurrent language models and encoder-decoder\\narchitectures [31, 21, 13].\\n⇤Equal contribution. Listing order is random. Jakob proposed replacing RNNs with self-attention and started\\nthe effort to evaluate this idea. Ashish, with Illia, designed and implemented', 0, {'id': 'b6c62336-83da-40c7-a80b-d9289eb5a65d'}), ('\\nsub-layers. The first is a multi-head self-attention mechanism, and the second is a simple, position-\\n2', 0, {'id': 'b233b2e0-14a6-491e-8b5e-1bb6eb51926b'})]\n"
+ ]
+ }
+ ],
+ "source": [
+ "print(\"Results for query: \", query)\n",
+ "print(results)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "base",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.10.6"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
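
Note on the vlite/main.py changes that follow: each chunk is now stored with three representations (the float vector plus binary and int8 quantized copies), and retrieval becomes a two-stage pass: a cheap scan over the binary vectors oversamples 4 * top_k candidates, and only that shortlist is rescored against the int8 vectors. As a minimal standalone sketch of the same pattern, assuming sign-bit binary quantization and linear int8 scaling (the actual semantics of EmbeddingModel.quantize may differ):

    import numpy as np

    # Toy float32 embeddings standing in for model output.
    rng = np.random.default_rng(0)
    corpus = rng.normal(size=(1000, 64)).astype(np.float32)
    query = rng.normal(size=64).astype(np.float32)
    top_k = 5

    def to_binary(x):
        # Illustrative sign quantization; one of several possible binary schemes.
        return np.where(x > 0, 1, -1).astype(np.int8)

    def to_int8(x):
        # Illustrative linear int8 quantization with a single global scale.
        scale = float(np.abs(x).max()) or 1.0
        return np.round(x / scale * 127).astype(np.int8)

    # Stage 1: coarse scan over binary vectors, keeping 4 * top_k candidates,
    # mirroring the oversampling factor used by rescore() below.
    coarse = to_binary(corpus).astype(np.int32) @ to_binary(query).astype(np.int32)
    candidates = np.argpartition(coarse, -4 * top_k)[-4 * top_k:]

    # Stage 2: rescore only the shortlist with the higher-precision int8 vectors.
    fine = to_int8(corpus)[candidates].astype(np.int32) @ to_int8(query).astype(np.int32)
    print(candidates[np.argsort(fine)[::-1][:top_k]])

The float vectors are kept alongside the quantized ones, so exact scoring stays available even though the new retrieval path never touches them.
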
need_chunks (bool, optional): Whether to split the text into chunks before embedding. Defaults to True. + fast (bool, optional): Whether to use fast mode for chunking. Defaults to True. Returns: list: A list of tuples, each containing the ID of the added text and the updated vectors array. @@ -48,6 +59,9 @@ def add(self, data, metadata=None, need_chunks=True, newEmbedding=False): print("Adding text to the collection...") data = [data] if not isinstance(data, list) else data results = [] + all_chunks = [] + all_metadata = [] + all_ids = [] for item in data: if isinstance(item, dict): @@ -60,26 +74,35 @@ def add(self, data, metadata=None, need_chunks=True, newEmbedding=False): item_id = str(uuid4()) item_metadata.update(metadata or {}) - item_metadata['id'] = item_id - + item_metadata['id'] = item_id + if need_chunks: - chunks = chop_and_chunk(text_content) - encoded_data = self.model.embed(chunks, device=self.device) + chunks = chop_and_chunk(text_content, fast=fast) else: chunks = [text_content] print("Encoding text... not chunking") - encoded_data = self.model.embed(chunks, device=self.device) - - for idx, (chunk, vector) in enumerate(zip(chunks, encoded_data)): - chunk_id = f"{item_id}_{idx}" - self.index[chunk_id] = { - 'text': chunk, - 'metadata': item_metadata, - 'vector': vector - } - - results.append((item_id, encoded_data, item_metadata)) - + + all_chunks.extend(chunks) + all_metadata.extend([item_metadata] * len(chunks)) + all_ids.extend([item_id] * len(chunks)) + + encoded_data = self.model.embed(all_chunks, device=self.device) + binary_encoded_data = self.model.quantize(encoded_data, precision="binary") + int8_encoded_data = self.model.quantize(encoded_data, precision="int8") + + for idx, (chunk, vector, binary_vector, int8_vector, metadata, item_id) in enumerate(zip(all_chunks, encoded_data, binary_encoded_data, int8_encoded_data, all_metadata, all_ids)): + chunk_id = f"{item_id}_{idx}" + self.index[chunk_id] = { + 'text': chunk, + 'metadata': metadata, + 'vector': vector, + 'binary_vector': binary_vector.tolist(), + 'int8_vector': int8_vector.tolist() + } + + if item_id not in [result[0] for result in results]: + results.append((item_id, encoded_data, metadata)) + self.save() print("Text added successfully.") return results @@ -99,25 +122,65 @@ def retrieve(self, text=None, top_k=5, metadata=None, newEmbedding=False): print("Retrieving similar texts...") if text: print(f"Retrieving top {top_k} similar texts for query: {text}") - query_vector = self.model.embed([text], device=self.device) - similarities = np.dot(query_vector, np.array([item['vector'] for item in self.index.values()]).T).flatten() - - # Apply metadata filter while finding similar texts - if metadata: - filtered_indices = [] - for idx, item_id in enumerate(self.index.keys()): # Iterate over item IDs - item_metadata = self.index[item_id]['metadata'] - if all(item_metadata.get(key) == value for key, value in metadata.items()): - filtered_indices.append(idx) - if len(filtered_indices) == top_k: # Stop when we have found top_k - break - top_k_ids = [list(self.index.keys())[idx] for idx in filtered_indices] - else: - top_k_ids = [list(self.index.keys())[idx] for idx in np.argsort(similarities)[-top_k:][::-1]] + query_chunks = chop_and_chunk(text, fast=True) + query_vectors = self.model.embed(query_chunks, device=self.device) + query_binary_vectors = self.model.quantize(query_vectors, precision="binary") + query_int8_vectors = self.model.quantize(query_vectors, precision="int8") + + results = [] + for 
query_binary_vector, query_int8_vector in zip(query_binary_vectors, query_int8_vectors):
+            chunk_results = self.rescore(query_binary_vector, query_int8_vector, top_k, metadata)
+            results.extend(chunk_results)
+
+        results.sort(key=lambda x: x[1], reverse=True)
+        results = results[:top_k]
 
         print("Retrieval completed.")
-        return [(self.index[idx]['text'], similarities[list(self.index.keys()).index(idx)], self.index[idx]['metadata']) for idx in top_k_ids]
-    
+        return [(self.index[idx]['text'], score, self.index[idx]['metadata']) for idx, score in results]
+
+    def rescore(self, query_binary_vector, query_int8_vector, top_k, metadata=None):
+        """
+        Performs a coarse scoring pass over the binary vectors, then rescores the top candidates with their int8 embeddings.
+
+        Args:
+            query_binary_vector (numpy.ndarray): Binary vector of the query.
+            query_int8_vector (numpy.ndarray): Int8 vector of the query.
+            top_k (int): Number of top similar texts to retrieve.
+            metadata (dict, optional): Metadata to filter the retrieved texts.
+
+        Returns:
+            list: A list of tuples containing the chunk IDs and their similarity scores.
+        """
+        # Reshape query_binary_vector and query_int8_vector to 1D arrays
+        query_binary_vector = query_binary_vector.reshape(-1)
+        query_int8_vector = query_int8_vector.reshape(-1)
+
+        # Coarse pass: score all stored binary vectors (argsort tolerates indexes smaller than top_k*4)
+        binary_vectors = np.array([item['binary_vector'] for item in self.index.values()])
+        binary_similarities = np.einsum('i,ji->j', query_binary_vector, binary_vectors)
+        top_k_indices = np.argsort(binary_similarities)[-top_k * 4:]
+        top_k_ids = [list(self.index.keys())[idx] for idx in top_k_indices]
+
+        # Apply the metadata filter to the top_k*4 candidates (may yield fewer)
+        if metadata:
+            filtered_ids = []
+            for item_id in top_k_ids:
+                item_metadata = self.index[item_id]['metadata']
+                if all(item_metadata.get(key) == value for key, value in metadata.items()):
+                    filtered_ids.append(item_id)
+            top_k_ids = filtered_ids[:top_k * 4]
+
+        # Rescore the surviving candidates using their int8 embeddings
+        int8_vectors = np.array([self.index[idx]['int8_vector'] for idx in top_k_ids])
+        int8_similarities = np.einsum('i,ji->j', query_int8_vector, int8_vectors)
+
+        # Keep the top_k best int8 scores (retrieve() re-sorts the merged results)
+        sorted_indices = np.argsort(int8_similarities)[-top_k:]
+        sorted_ids = np.take(top_k_ids, sorted_indices)
+        sorted_scores = int8_similarities[sorted_indices]
+
+        return list(zip(sorted_ids, sorted_scores))
+
     def delete(self, ids):
         """
         Deletes items from the collection by their IDs.
@@ -236,9 +299,20 @@ def save(self):
         Saves the current state of the collection to a file.
""" print(f"Saving collection to {self.collection}") + index_data = { + chunk_id: { + 'text': chunk_data['text'], + 'metadata': chunk_data['metadata'], + 'vector': chunk_data['vector'], + 'binary_vector': chunk_data['binary_vector'], + 'int8_vector': chunk_data['int8_vector'] + } + for chunk_id, chunk_data in self.index.items() + } with open(self.collection, 'wb') as f: - np.savez(f, index=self.index) + np.savez(f, index=index_data) print("Collection saved successfully.") + def clear(self): """ diff --git a/vlite/model.py b/vlite/model.py index 16c3b9b..2ab11e1 100644 --- a/vlite/model.py +++ b/vlite/model.py @@ -3,6 +3,7 @@ import llama_cpp from huggingface_hub import hf_hub_download import tiktoken +import numpy as np class EmbeddingModel: def __init__(self, model_name='mixedbread-ai/mxbai-embed-large-v1'): @@ -14,6 +15,8 @@ def __init__(self, model_name='mixedbread-ai/mxbai-embed-large-v1'): self.max_seq_length = 512 # hardcoded def embed(self, texts, max_seq_length=512, device="cpu"): + if isinstance(texts, str): + texts = [texts] embeddings_dict = self.model.create_embedding(texts) return [item["embedding"] for item in embeddings_dict["data"]] @@ -24,5 +27,38 @@ def token_count(self, texts): token_ids = enc.encode(text, disallowed_special=()) tokens += len(token_ids) return tokens + + def quantize(self, embeddings, precision="binary"): + """ + Quantizes the embeddings to the specified precision. + + Args: + embeddings (list or numpy.ndarray): Input embeddings to quantize. + precision (str, optional): Precision to quantize the embeddings. Can be "binary" or "int8". Defaults to "binary". + + Returns: + numpy.ndarray: Quantized embeddings. + """ + embeddings = np.array(embeddings) # Convert embeddings to a numpy array + + if precision == "binary": + return np.packbits(embeddings > 0).reshape(embeddings.shape[0], -1) + elif precision == "int8": + return ((embeddings - np.min(embeddings, axis=0)) / (np.max(embeddings, axis=0) - np.min(embeddings, axis=0)) * 255).astype(np.uint8) + else: + raise ValueError(f"Unsupported precision: {precision}") + + def rescore(self, query_vector, vectors): + """ + Rescores the retrieved vectors using the query vector. + + Args: + query_vector (numpy.ndarray): Query vector for rescoring. + vectors (numpy.ndarray): Retrieved vectors to rescore. + + Returns: + numpy.ndarray: Rescored similarities. + """ + return np.dot(query_vector, vectors.T).flatten() \ No newline at end of file diff --git a/vlite/utils.py b/vlite/utils.py index 608ed56..9e63444 100644 --- a/vlite/utils.py +++ b/vlite/utils.py @@ -21,32 +21,30 @@ except ImportError: run_ocr = None -def chop_and_chunk(text, max_seq_length=512): +def chop_and_chunk(text, max_seq_length=512, fast=False): """ - Chop text into chunks of max_seq_length tokens. + Chop text into chunks of max_seq_length tokens or max_seq_length*4 characters (fast mode). 
""" if isinstance(text, str): text = [text] - enc = tiktoken.get_encoding("cl100k_base") chunks = [] - - print(f"Lenght of text: {len(text)}") + print(f"Length of text: {len(text)}") print(f"Original text: {text}") - for t in text: - token_ids = enc.encode(t, disallowed_special=()) - num_tokens = len(token_ids) - - if num_tokens <= max_seq_length: - chunks.append(t) + if fast: + chunk_size = max_seq_length * 4 + chunks.extend([t[i:i + chunk_size] for i in range(0, len(t), chunk_size)]) else: - for i in range(0, num_tokens, max_seq_length): - chunk = enc.decode(token_ids[i:i + max_seq_length]) - chunks.append(chunk) - - print("Chopped text into this chunk:",chunks) - + token_ids = enc.encode(t, disallowed_special=()) + num_tokens = len(token_ids) + if num_tokens <= max_seq_length: + chunks.append(t) + else: + for i in range(0, num_tokens, max_seq_length): + chunk = enc.decode(token_ids[i:i + max_seq_length]) + chunks.append(chunk) + print("Chopped text into these chunks:", chunks) print(f"Chopped text into {len(chunks)} chunks.") return chunks