# Sanity check: confirm a CUDA-capable GPU is visible before loading the model.
import torch
import torch.nn as nn

torch.cuda.is_available()
# Load the LLaMA-7B tokenizer and the 8-bit-quantized base model,
# then prepare the model for int8 fine-tuning (LoRA-style adapters added later).
from transformers import LlamaTokenizer, LlamaForCausalLM
from peft import prepare_model_for_int8_training

tokenizer = LlamaTokenizer.from_pretrained("decapoda-research/llama-7b-hf")
tokenizer.pad_token_id = 0       # use token id 0 for padding
tokenizer.padding_side = 'left'  # left-pad, as is usual for causal generation

model = LlamaForCausalLM.from_pretrained(
    "decapoda-research/llama-7b-hf",
    load_in_8bit=True,
    device_map="auto",
    torch_dtype=torch.float16,
)
model = prepare_model_for_int8_training(model)
import random
import json

# Operator / symbol vocabulary defined by the WikiSQL dataset.
agg_ops = ['', 'MAX', 'MIN', 'COUNT', 'SUM', 'AVG']
cond_ops = ['=', '>', '<', 'OP']
syms = ['SELECT', 'WHERE', 'AND', 'COL', 'TABLE', 'CAPTION', 'PAGE', 'SECTION', 'OP', 'COND', 'QUESTION', 'AGG', 'AGGOPS', 'CONDOPS']

def fix_repr(d, cols, types, tid):
    """Render a WikiSQL logical form `d` as a SQL string against table `tid`.

    `d` has keys 'sel' (selected column index), 'agg' (aggregation-op index)
    and 'conds' (list of [column_index, op_index, value] conditions).
    `cols`/`types` are the table's column names and column types.
    """
    sel_index = d['sel']
    agg_index = d['agg']
    conditions = d['conds']
    rep = 'SELECT {agg} {sel} FROM {tid}'.format(
        agg=agg_ops[agg_index],
        sel=cols[sel_index],
        tid=tid
    )
    if conditions:
        clauses = []
        for i, o, v in conditions:
            # Text-typed columns get quoted literals; numeric values do not.
            val = f"'{v}'" if types[i] in ['text'] else v
            clauses.append(f'{cols[i]} {cond_ops[o]} {val}')
        rep += ' WHERE ' + ' AND '.join(clauses)
    return rep

tbl_cols = {}
tbl_types = {}
tbl_str = {}

# NOTE(review): `prefix` is not referenced by any cell visible here; kept in
# case a later (unseen) cell uses it as an instruction preamble.
prefix = 'Below is a question that describes a data request, paired with an input that describes a SQL table. Write a SQL query that retrieves the data.'

def tbl_def_to_string(id, header, types):
    """One-line textual schema for a table. `types` kept for interface parity."""
    return f'table: {id}\ncolumns: ' + ','.join(header)

# Index every table's schema/columns/types by table id.
with open('data/train.tables.jsonl') as f:
    for line in f:
        js = json.loads(line)
        id = js['id']
        hdr = js['header']
        ts = js['types']
        tbl_str[id] = tbl_def_to_string(id, hdr, ts)
        tbl_cols[id] = hdr
        tbl_types[id] = ts

# Build (question prompt, SQL answer) pairs.
q_s = []
a_s = []
with open('data/train.jsonl') as f:
    for line in f:
        js = json.loads(line)
        id = js['table_id']
        q_s.append(tbl_str[id] + '\nQ: ' + js['question'] + '\nA: ')
        a_s.append(fix_repr(js['sql'], tbl_cols[id], tbl_types[id], id) + "\nEND\n")

M = len(q_s)
data_txt = [q_s[i] + a_s[i] for i in range(M)]

# Show a few random training examples.
for i in range(5):
    j = random.randint(0, M - 1)
    print()
    print(data_txt[j])

import numpy as np
import pandas as pd

# Tokenize every example and inspect the token-length distribution.
toks = [tokenizer(s) for s in data_txt]
print(len(toks[0].input_ids))
lens = np.array([len(tok.input_ids) for tok in toks])
print(pd.DataFrame(lens).describe())

# Keep only the examples that fit within 100 tokens.
q_red = [a for a, b in zip(q_s, lens) if b < 100]
a_red = [a for a, b in zip(a_s, lens) if b < 100]
data_red = [q_red[i] + a_red[i] for i in range(len(q_red))]
print(len(data_red))
import random, datasets

# Build an HF Dataset of tokenized prompts, truncated/left-padded to 100 tokens
# (matches the < 100-token filtering applied when building `data_red`).
#d = {'prompt': random.sample(data_red, 1000)}
d = {'prompt': data_red}

data = datasets.Dataset.from_dict(d)
data = data.map(lambda x:
    tokenizer(
        x['prompt'],
        truncation=True,
        max_length=100,
        padding="max_length"
    ))

data = data.remove_columns('prompt')

from peft import LoraConfig, get_peft_model
import transformers

# LoRA / training hyperparameters.
LORA_R = 4
LORA_ALPHA = 16
LORA_DROPOUT = .1
BATCH = 128
MICRO_BATCH = 4
N_GAS = BATCH//MICRO_BATCH   # gradient-accumulation steps per optimizer step
EPOCHS = 2
LR = 1e-5

lora_cfg = LoraConfig(
    r = LORA_R,
    lora_alpha=LORA_ALPHA,
    lora_dropout=LORA_DROPOUT,
    # BUG FIX: was 'CASUAL_LM' (typo). peft's task type is TaskType.CAUSAL_LM;
    # the misspelled string does not map to a valid task type.
    task_type='CAUSAL_LM',
    target_modules=['q_proj','v_proj']
)

model = get_peft_model(model, lora_cfg)

targs = transformers.TrainingArguments(
    per_device_train_batch_size=MICRO_BATCH,
    gradient_accumulation_steps=N_GAS,
    warmup_steps=0,
    num_train_epochs=EPOCHS,
    learning_rate=LR,
    fp16=True,
    logging_steps=1,
    output_dir='sqllama-out3',
    save_total_limit=3,
    remove_unused_columns=False
)
| Step | \n", "Training Loss | \n", "
|---|---|
| 1 | \n", "2.748800 | \n", "
| 2 | \n", "2.723800 | \n", "
| 3 | \n", "2.737600 | \n", "
| 4 | \n", "2.707100 | \n", "
| 5 | \n", "2.692800 | \n", "
| 6 | \n", "2.720700 | \n", "
| 7 | \n", "2.681400 | \n", "
| 8 | \n", "2.736400 | \n", "
| 9 | \n", "2.701800 | \n", "
| 10 | \n", "2.711700 | \n", "
| 11 | \n", "2.685800 | \n", "
| 12 | \n", "2.684300 | \n", "
| 13 | \n", "2.686300 | \n", "
| 14 | \n", "2.698800 | \n", "
| 15 | \n", "2.659300 | \n", "
| 16 | \n", "2.688900 | \n", "
| 17 | \n", "2.661800 | \n", "
| 18 | \n", "2.677700 | \n", "
| 19 | \n", "2.647100 | \n", "
| 20 | \n", "2.679800 | \n", "
| 21 | \n", "2.652000 | \n", "
| 22 | \n", "2.628900 | \n", "
| 23 | \n", "2.656100 | \n", "
| 24 | \n", "2.669100 | \n", "
| 25 | \n", "2.667800 | \n", "
| 26 | \n", "2.636300 | \n", "
| 27 | \n", "2.616800 | \n", "
| 28 | \n", "2.630600 | \n", "
| 29 | \n", "2.621000 | \n", "
| 30 | \n", "2.602000 | \n", "
| 31 | \n", "2.607900 | \n", "
| 32 | \n", "2.635800 | \n", "
| 33 | \n", "2.594600 | \n", "
| 34 | \n", "2.604400 | \n", "
| 35 | \n", "2.618900 | \n", "
| 36 | \n", "2.563400 | \n", "
| 37 | \n", "2.589200 | \n", "
| 38 | \n", "2.552100 | \n", "
| 39 | \n", "2.583600 | \n", "
| 40 | \n", "2.554500 | \n", "
| 41 | \n", "2.557400 | \n", "
| 42 | \n", "2.536700 | \n", "
| 43 | \n", "2.535000 | \n", "
| 44 | \n", "2.557900 | \n", "
| 45 | \n", "2.530100 | \n", "
| 46 | \n", "2.527900 | \n", "
| 47 | \n", "2.510100 | \n", "
| 48 | \n", "2.539100 | \n", "
| 49 | \n", "2.500100 | \n", "
| 50 | \n", "2.536200 | \n", "
| 51 | \n", "2.487100 | \n", "
| 52 | \n", "2.521700 | \n", "
| 53 | \n", "2.532600 | \n", "
| 54 | \n", "2.494500 | \n", "
| 55 | \n", "2.468900 | \n", "
| 56 | \n", "2.468700 | \n", "
| 57 | \n", "2.474300 | \n", "
| 58 | \n", "2.480900 | \n", "
| 59 | \n", "2.442800 | \n", "
| 60 | \n", "2.472800 | \n", "
| 61 | \n", "2.452900 | \n", "
| 62 | \n", "2.452000 | \n", "
| 63 | \n", "2.443100 | \n", "
| 64 | \n", "2.446700 | \n", "
| 65 | \n", "2.415100 | \n", "
| 66 | \n", "2.376300 | \n", "
| 67 | \n", "2.411500 | \n", "
| 68 | \n", "2.403900 | \n", "
| 69 | \n", "2.383800 | \n", "
| 70 | \n", "2.427800 | \n", "
| 71 | \n", "2.419400 | \n", "
| 72 | \n", "2.371900 | \n", "
| 73 | \n", "2.364400 | \n", "
| 74 | \n", "2.360000 | \n", "
| 75 | \n", "2.337600 | \n", "
| 76 | \n", "2.332800 | \n", "
| 77 | \n", "2.315700 | \n", "
| 78 | \n", "2.344200 | \n", "
| 79 | \n", "2.331700 | \n", "
| 80 | \n", "2.303100 | \n", "
| 81 | \n", "2.324700 | \n", "
| 82 | \n", "2.285900 | \n", "
| 83 | \n", "2.268000 | \n", "
| 84 | \n", "2.260600 | \n", "
| 85 | \n", "2.286100 | \n", "
| 86 | \n", "2.233600 | \n", "
| 87 | \n", "2.266200 | \n", "
| 88 | \n", "2.217000 | \n", "
| 89 | \n", "2.249300 | \n", "
| 90 | \n", "2.239000 | \n", "
| 91 | \n", "2.221900 | \n", "
| 92 | \n", "2.223300 | \n", "
| 93 | \n", "2.179500 | \n", "
| 94 | \n", "2.204400 | \n", "
| 95 | \n", "2.193200 | \n", "
| 96 | \n", "2.163800 | \n", "
| 97 | \n", "2.158200 | \n", "
| 98 | \n", "2.127700 | \n", "
| 99 | \n", "2.141400 | \n", "
| 100 | \n", "2.121400 | \n", "
| 101 | \n", "2.115500 | \n", "
| 102 | \n", "2.125200 | \n", "
| 103 | \n", "2.140100 | \n", "
| 104 | \n", "2.118400 | \n", "
| 105 | \n", "2.110400 | \n", "
| 106 | \n", "2.097300 | \n", "
| 107 | \n", "2.071400 | \n", "
| 108 | \n", "2.083400 | \n", "
| 109 | \n", "2.090200 | \n", "
| 110 | \n", "2.078200 | \n", "
| 111 | \n", "2.061100 | \n", "
| 112 | \n", "2.047500 | \n", "
| 113 | \n", "2.006100 | \n", "
| 114 | \n", "2.023800 | \n", "
| 115 | \n", "2.014000 | \n", "
| 116 | \n", "2.008800 | \n", "
| 117 | \n", "1.988800 | \n", "
| 118 | \n", "1.984900 | \n", "
| 119 | \n", "1.971000 | \n", "
| 120 | \n", "1.924100 | \n", "
| 121 | \n", "1.953100 | \n", "
| 122 | \n", "1.957800 | \n", "
| 123 | \n", "1.952500 | \n", "
| 124 | \n", "1.890400 | \n", "
| 125 | \n", "1.915900 | \n", "
| 126 | \n", "1.901100 | \n", "
| 127 | \n", "1.879900 | \n", "
| 128 | \n", "1.834100 | \n", "
| 129 | \n", "1.855900 | \n", "
| 130 | \n", "1.853800 | \n", "
| 131 | \n", "1.869200 | \n", "
| 132 | \n", "1.821400 | \n", "
| 133 | \n", "1.835100 | \n", "
| 134 | \n", "1.817700 | \n", "
| 135 | \n", "1.785800 | \n", "
| 136 | \n", "1.764000 | \n", "
| 137 | \n", "1.796800 | \n", "
| 138 | \n", "1.751100 | \n", "
| 139 | \n", "1.756500 | \n", "
| 140 | \n", "1.789900 | \n", "
| 141 | \n", "1.773100 | \n", "
| 142 | \n", "1.729200 | \n", "
| 143 | \n", "1.700200 | \n", "
| 144 | \n", "1.721200 | \n", "
| 145 | \n", "1.690600 | \n", "
| 146 | \n", "1.687700 | \n", "
| 147 | \n", "1.743500 | \n", "
| 148 | \n", "1.690000 | \n", "
| 149 | \n", "1.687200 | \n", "
| 150 | \n", "1.663000 | \n", "
| 151 | \n", "1.648600 | \n", "
| 152 | \n", "1.667100 | \n", "
| 153 | \n", "1.665600 | \n", "
| 154 | \n", "1.647000 | \n", "
| 155 | \n", "1.629500 | \n", "
| 156 | \n", "1.620800 | \n", "
| 157 | \n", "1.616400 | \n", "
| 158 | \n", "1.658500 | \n", "
| 159 | \n", "1.593900 | \n", "
| 160 | \n", "1.604300 | \n", "
| 161 | \n", "1.621200 | \n", "
| 162 | \n", "1.607900 | \n", "
| 163 | \n", "1.591100 | \n", "
| 164 | \n", "1.598100 | \n", "
| 165 | \n", "1.579700 | \n", "
| 166 | \n", "1.545500 | \n", "
| 167 | \n", "1.582100 | \n", "
| 168 | \n", "1.568300 | \n", "
| 169 | \n", "1.557900 | \n", "
| 170 | \n", "1.561300 | \n", "
| 171 | \n", "1.521800 | \n", "
| 172 | \n", "1.542500 | \n", "
| 173 | \n", "1.502300 | \n", "
| 174 | \n", "1.513900 | \n", "
| 175 | \n", "1.501500 | \n", "
| 176 | \n", "1.551200 | \n", "
| 177 | \n", "1.495600 | \n", "
| 178 | \n", "1.504000 | \n", "
| 179 | \n", "1.512500 | \n", "
| 180 | \n", "1.488200 | \n", "
| 181 | \n", "1.492200 | \n", "
| 182 | \n", "1.494300 | \n", "
| 183 | \n", "1.494800 | \n", "
| 184 | \n", "1.446100 | \n", "
| 185 | \n", "1.514700 | \n", "
| 186 | \n", "1.450900 | \n", "
| 187 | \n", "1.476900 | \n", "
| 188 | \n", "1.447100 | \n", "
| 189 | \n", "1.490800 | \n", "
| 190 | \n", "1.433200 | \n", "
| 191 | \n", "1.438100 | \n", "
| 192 | \n", "1.410500 | \n", "
| 193 | \n", "1.422600 | \n", "
| 194 | \n", "1.405500 | \n", "
| 195 | \n", "1.439400 | \n", "
| 196 | \n", "1.448100 | \n", "
| 197 | \n", "1.410200 | \n", "
| 198 | \n", "1.403800 | \n", "
| 199 | \n", "1.464400 | \n", "
| 200 | \n", "1.417700 | \n", "
| 201 | \n", "1.419500 | \n", "
| 202 | \n", "1.419400 | \n", "
| 203 | \n", "1.387700 | \n", "
| 204 | \n", "1.400400 | \n", "
| 205 | \n", "1.404700 | \n", "
| 206 | \n", "1.398400 | \n", "
| 207 | \n", "1.358000 | \n", "
| 208 | \n", "1.359600 | \n", "
| 209 | \n", "1.367700 | \n", "
| 210 | \n", "1.358600 | \n", "
| 211 | \n", "1.369200 | \n", "
| 212 | \n", "1.373700 | \n", "
| 213 | \n", "1.395100 | \n", "
| 214 | \n", "1.360800 | \n", "
| 215 | \n", "1.343900 | \n", "
| 216 | \n", "1.330300 | \n", "
| 217 | \n", "1.328800 | \n", "
| 218 | \n", "1.369900 | \n", "
| 219 | \n", "1.346300 | \n", "
| 220 | \n", "1.379700 | \n", "
| 221 | \n", "1.326000 | \n", "
| 222 | \n", "1.334600 | \n", "
| 223 | \n", "1.339100 | \n", "
| 224 | \n", "1.349200 | \n", "
| 225 | \n", "1.324800 | \n", "
| 226 | \n", "1.303600 | \n", "
| 227 | \n", "1.299900 | \n", "
| 228 | \n", "1.338800 | \n", "
| 229 | \n", "1.331800 | \n", "
| 230 | \n", "1.351400 | \n", "
| 231 | \n", "1.314200 | \n", "
| 232 | \n", "1.293600 | \n", "
| 233 | \n", "1.322100 | \n", "
| 234 | \n", "1.295800 | \n", "
| 235 | \n", "1.302500 | \n", "
| 236 | \n", "1.338900 | \n", "
| 237 | \n", "1.308900 | \n", "
| 238 | \n", "1.290100 | \n", "
| 239 | \n", "1.323300 | \n", "
| 240 | \n", "1.270500 | \n", "
| 241 | \n", "1.246300 | \n", "
| 242 | \n", "1.303900 | \n", "
| 243 | \n", "1.324800 | \n", "
| 244 | \n", "1.216000 | \n", "
| 245 | \n", "1.303500 | \n", "
| 246 | \n", "1.304900 | \n", "
| 247 | \n", "1.273300 | \n", "
| 248 | \n", "1.278300 | \n", "
| 249 | \n", "1.252000 | \n", "
| 250 | \n", "1.283400 | \n", "
| 251 | \n", "1.271600 | \n", "
| 252 | \n", "1.300300 | \n", "
| 253 | \n", "1.265800 | \n", "
| 254 | \n", "1.249200 | \n", "
| 255 | \n", "1.252600 | \n", "
| 256 | \n", "1.265500 | \n", "
| 257 | \n", "1.228600 | \n", "
| 258 | \n", "1.257300 | \n", "
| 259 | \n", "1.288900 | \n", "
| 260 | \n", "1.257200 | \n", "
| 261 | \n", "1.243700 | \n", "
| 262 | \n", "1.272100 | \n", "
| 263 | \n", "1.252000 | \n", "
| 264 | \n", "1.264900 | \n", "
| 265 | \n", "1.268800 | \n", "
| 266 | \n", "1.256000 | \n", "
| 267 | \n", "1.230200 | \n", "
| 268 | \n", "1.231700 | \n", "
| 269 | \n", "1.243400 | \n", "
| 270 | \n", "1.285200 | \n", "
| 271 | \n", "1.225500 | \n", "
| 272 | \n", "1.217900 | \n", "
| 273 | \n", "1.209200 | \n", "
| 274 | \n", "1.224200 | \n", "
| 275 | \n", "1.226400 | \n", "
| 276 | \n", "1.261500 | \n", "
| 277 | \n", "1.223900 | \n", "
| 278 | \n", "1.244000 | \n", "
| 279 | \n", "1.226600 | \n", "
| 280 | \n", "1.235000 | \n", "
| 281 | \n", "1.213400 | \n", "
| 282 | \n", "1.177600 | \n", "
| 283 | \n", "1.218100 | \n", "
| 284 | \n", "1.231900 | \n", "
| 285 | \n", "1.200900 | \n", "
| 286 | \n", "1.223400 | \n", "
| 287 | \n", "1.235100 | \n", "
| 288 | \n", "1.232500 | \n", "
| 289 | \n", "1.230100 | \n", "
| 290 | \n", "1.225900 | \n", "
| 291 | \n", "1.182700 | \n", "
| 292 | \n", "1.237100 | \n", "
| 293 | \n", "1.201000 | \n", "
| 294 | \n", "1.213000 | \n", "
| 295 | \n", "1.205500 | \n", "
| 296 | \n", "1.181900 | \n", "
| 297 | \n", "1.198300 | \n", "
| 298 | \n", "1.195200 | \n", "
| 299 | \n", "1.215000 | \n", "
| 300 | \n", "1.195500 | \n", "
| 301 | \n", "1.186100 | \n", "
| 302 | \n", "1.174900 | \n", "
| 303 | \n", "1.184400 | \n", "
| 304 | \n", "1.207100 | \n", "
| 305 | \n", "1.181100 | \n", "
| 306 | \n", "1.195300 | \n", "
| 307 | \n", "1.189000 | \n", "
| 308 | \n", "1.180200 | \n", "
| 309 | \n", "1.167200 | \n", "
| 310 | \n", "1.206700 | \n", "
| 311 | \n", "1.203600 | \n", "
| 312 | \n", "1.186600 | \n", "
| 313 | \n", "1.224100 | \n", "
| 314 | \n", "1.180000 | \n", "
| 315 | \n", "1.186600 | \n", "
| 316 | \n", "1.150700 | \n", "
| 317 | \n", "1.165700 | \n", "
| 318 | \n", "1.178100 | \n", "
| 319 | \n", "1.148300 | \n", "
| 320 | \n", "1.153600 | \n", "
| 321 | \n", "1.189200 | \n", "
| 322 | \n", "1.182100 | \n", "
| 323 | \n", "1.183800 | \n", "
| 324 | \n", "1.202900 | \n", "
| 325 | \n", "1.196600 | \n", "
| 326 | \n", "1.200800 | \n", "
| 327 | \n", "1.153100 | \n", "
| 328 | \n", "1.212400 | \n", "
| 329 | \n", "1.167300 | \n", "
| 330 | \n", "1.188300 | \n", "
| 331 | \n", "1.179300 | \n", "
| 332 | \n", "1.211400 | \n", "
| 333 | \n", "1.169900 | \n", "
| 334 | \n", "1.179300 | \n", "
| 335 | \n", "1.153300 | \n", "
| 336 | \n", "1.188900 | \n", "
| 337 | \n", "1.179200 | \n", "
| 338 | \n", "1.217300 | \n", "
| 339 | \n", "1.169700 | \n", "
| 340 | \n", "1.177700 | \n", "
| 341 | \n", "1.197300 | \n", "
| 342 | \n", "1.177800 | \n", "
| 343 | \n", "1.169700 | \n", "
| 344 | \n", "1.186800 | \n", "
| 345 | \n", "1.180000 | \n", "
| 346 | \n", "1.193400 | \n", "
| 347 | \n", "1.171900 | \n", "
| 348 | \n", "1.190000 | \n", "
| 349 | \n", "1.160900 | \n", "
| 350 | \n", "1.170800 | \n", "
| 351 | \n", "1.166900 | \n", "
| 352 | \n", "1.183200 | \n", "
| 353 | \n", "1.118200 | \n", "
| 354 | \n", "1.185900 | \n", "
| 355 | \n", "1.157800 | \n", "
| 356 | \n", "1.160200 | \n", "
| 357 | \n", "1.184200 | \n", "
| 358 | \n", "1.172100 | \n", "
| 359 | \n", "1.143800 | \n", "
| 360 | \n", "1.178000 | \n", "
| 361 | \n", "1.157900 | \n", "
| 362 | \n", "1.151700 | \n", "
| 363 | \n", "1.196600 | \n", "
| 364 | \n", "1.181800 | \n", "
| 365 | \n", "1.195600 | \n", "
| 366 | \n", "1.165000 | \n", "
| 367 | \n", "1.157300 | \n", "
| 368 | \n", "1.165200 | \n", "
| 369 | \n", "1.167700 | \n", "
| 370 | \n", "1.184900 | \n", "
| 371 | \n", "1.168400 | \n", "
| 372 | \n", "1.150500 | \n", "
| 373 | \n", "1.152900 | \n", "
| 374 | \n", "1.158900 | \n", "
| 375 | \n", "1.143900 | \n", "
| 376 | \n", "1.157200 | \n", "
| 377 | \n", "1.146800 | \n", "
| 378 | \n", "1.142600 | \n", "
| 379 | \n", "1.140600 | \n", "
| 380 | \n", "1.142400 | \n", "
| 381 | \n", "1.114100 | \n", "
| 382 | \n", "1.169700 | \n", "
| 383 | \n", "1.142500 | \n", "
| 384 | \n", "1.176000 | \n", "
| 385 | \n", "1.160600 | \n", "
| 386 | \n", "1.164700 | \n", "
| 387 | \n", "1.124000 | \n", "
| 388 | \n", "1.134500 | \n", "
| 389 | \n", "1.185500 | \n", "
| 390 | \n", "1.154300 | \n", "
| 391 | \n", "1.125500 | \n", "
| 392 | \n", "1.174400 | \n", "
| 393 | \n", "1.132800 | \n", "
| 394 | \n", "1.145200 | \n", "
| 395 | \n", "1.129800 | \n", "
| 396 | \n", "1.140600 | \n", "
| 397 | \n", "1.126000 | \n", "
| 398 | \n", "1.182800 | \n", "
| 399 | \n", "1.127800 | \n", "
| 400 | \n", "1.155000 | \n", "
| 401 | \n", "1.134600 | \n", "
| 402 | \n", "1.155900 | \n", "
| 403 | \n", "1.150400 | \n", "
| 404 | \n", "1.141700 | \n", "
| 405 | \n", "1.131500 | \n", "
| 406 | \n", "1.169600 | \n", "
| 407 | \n", "1.170500 | \n", "
| 408 | \n", "1.129100 | \n", "
| 409 | \n", "1.151700 | \n", "
| 410 | \n", "1.168200 | \n", "
| 411 | \n", "1.109100 | \n", "
| 412 | \n", "1.129700 | \n", "
| 413 | \n", "1.143900 | \n", "
| 414 | \n", "1.157300 | \n", "
| 415 | \n", "1.128900 | \n", "
| 416 | \n", "1.171500 | \n", "
| 417 | \n", "1.141600 | \n", "
| 418 | \n", "1.157700 | \n", "
| 419 | \n", "1.137000 | \n", "
| 420 | \n", "1.154000 | \n", "
| 421 | \n", "1.167300 | \n", "
| 422 | \n", "1.137400 | \n", "
| 423 | \n", "1.121500 | \n", "
| 424 | \n", "1.128500 | \n", "
| 425 | \n", "1.130300 | \n", "
| 426 | \n", "1.162100 | \n", "
| 427 | \n", "1.155100 | \n", "
| 428 | \n", "1.145300 | \n", "
| 429 | \n", "1.121000 | \n", "
| 430 | \n", "1.182200 | \n", "
| 431 | \n", "1.157000 | \n", "
| 432 | \n", "1.162300 | \n", "
| 433 | \n", "1.135200 | \n", "
| 434 | \n", "1.141300 | \n", "
| 435 | \n", "1.151700 | \n", "
| 436 | \n", "1.148000 | \n", "
| 437 | \n", "1.132500 | \n", "
| 438 | \n", "1.163000 | \n", "
| 439 | \n", "1.116300 | \n", "
| 440 | \n", "1.142000 | \n", "
| 441 | \n", "1.091700 | \n", "
| 442 | \n", "1.141500 | \n", "
| 443 | \n", "1.154900 | \n", "
| 444 | \n", "1.120400 | \n", "
| 445 | \n", "1.173700 | \n", "
| 446 | \n", "1.138300 | \n", "
| 447 | \n", "1.135600 | \n", "
| 448 | \n", "1.138800 | \n", "
| 449 | \n", "1.126800 | \n", "
| 450 | \n", "1.129400 | \n", "
| 451 | \n", "1.146300 | \n", "
| 452 | \n", "1.104200 | \n", "
| 453 | \n", "1.163500 | \n", "
| 454 | \n", "1.169300 | \n", "
| 455 | \n", "1.147100 | \n", "
| 456 | \n", "1.157100 | \n", "
| 457 | \n", "1.122100 | \n", "
| 458 | \n", "1.121900 | \n", "
| 459 | \n", "1.150500 | \n", "
| 460 | \n", "1.115700 | \n", "
| 461 | \n", "1.121100 | \n", "
| 462 | \n", "1.123400 | \n", "
| 463 | \n", "1.097500 | \n", "
| 464 | \n", "1.103800 | \n", "
| 465 | \n", "1.167700 | \n", "
| 466 | \n", "1.130000 | \n", "
| 467 | \n", "1.164500 | \n", "
| 468 | \n", "1.127200 | \n", "
| 469 | \n", "1.133800 | \n", "
| 470 | \n", "1.132700 | \n", "
| 471 | \n", "1.122800 | \n", "
| 472 | \n", "1.159500 | \n", "
| 473 | \n", "1.122900 | \n", "
| 474 | \n", "1.105000 | \n", "
| 475 | \n", "1.145700 | \n", "
| 476 | \n", "1.086400 | \n", "
| 477 | \n", "1.112600 | \n", "
| 478 | \n", "1.139300 | \n", "
| 479 | \n", "1.135000 | \n", "
| 480 | \n", "1.135200 | \n", "
| 481 | \n", "1.117500 | \n", "
| 482 | \n", "1.102300 | \n", "
| 483 | \n", "1.147700 | \n", "
| 484 | \n", "1.119200 | \n", "
| 485 | \n", "1.125800 | \n", "
| 486 | \n", "1.135400 | \n", "
| 487 | \n", "1.149500 | \n", "
| 488 | \n", "1.099400 | \n", "
| 489 | \n", "1.153900 | \n", "
| 490 | \n", "1.122700 | \n", "
| 491 | \n", "1.089400 | \n", "
| 492 | \n", "1.167200 | \n", "
| 493 | \n", "1.151300 | \n", "
| 494 | \n", "1.131400 | \n", "
| 495 | \n", "1.131400 | \n", "
| 496 | \n", "1.145200 | \n", "
| 497 | \n", "1.125700 | \n", "
| 498 | \n", "1.119300 | \n", "
| 499 | \n", "1.128600 | \n", "
| 500 | \n", "1.121000 | \n", "
"
],
"text/plain": [
"