
01_bert_custom.ipynb
  1. {
  2. "cells": [
  3. {
  4. "cell_type": "code",
  5. "execution_count": 1,
  6. "id": "bce6d2a3-c3df-46f9-926e-2dda07dc9a3d",
  7. "metadata": {
  8. "tags": []
  9. },
  10. "outputs": [],
  11. "source": [
  12. "from types import SimpleNamespace\n",
  13. "from typing import Optional\n",
  14. "\n",
  15. "import torch\n",
  16. "import torch.nn as nn"
  17. ]
  18. },
  19. {
  20. "cell_type": "code",
  21. "execution_count": 2,
  22. "id": "5095bac0-f9ef-4aee-8050-acab81ee0d6f",
  23. "metadata": {
  24. "tags": []
  25. },
  26. "outputs": [],
  27. "source": [
  28. "DEVICE = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")\n",
  29. "MODEL_NAME = 'bert-base-uncased'\n",
  30. "NAMESPACE = 'sadcl'\n",
  31. "\n",
  32. "NTOKENS = 10\n",
  33. "PROMPT_PLACE = 'post' # pre"
  34. ]
  35. },
  36. {
  37. "cell_type": "code",
  38. "execution_count": 6,
  39. "id": "ad41bd6e-d7f5-4c4b-a4fd-de039bb9b8c7",
  40. "metadata": {
  41. "tags": []
  42. },
  43. "outputs": [],
  44. "source": [
  45. "def initialize_embedding(\n",
  46. " emb_dim: int,\n",
  47. " n_tokens: int, \n",
  48. " random_range: float,\n",
  49. " initialize_from: Optional[torch.Tensor] = None\n",
  50. "):\n",
  51. " if initialize_from is None:\n",
  52. " return torch.FloatTensor(n_tokens, emb_dim).uniform_(-random_range, random_range)\n",
  53. "\n",
  54. " assert initialize_from.shape == (n_tokens, )\n",
  55. "\n",
  56. " return initialize_from.clone().detach().tile(1, emb_dim)\n",
  57. "\n",
  58. "class SoftEmbedding(nn.Module):\n",
  59. " def __init__(\n",
  60. " self,\n",
  61. " emb_dim: int,\n",
  62. " n_tokens: int, \n",
  63. " random_range: float = 0.5,\n",
  64. " prompt_place: str = 'post',\n",
  65. " mode: str = 'cat',\n",
  66. " initialize_from: Optional[torch.Tensor] = None\n",
  67. " ):\n",
  68. " super().__init__()\n",
  69. " assert mode in ['cat', 'add']\n",
  70. " assert prompt_place in ['pre', 'post']\n",
  71. " \n",
  72. " self.post_tokenizer_map = {\n",
  73. " 'input_ids': 0,\n",
  74. " 'attention_mask': 1,\n",
  75. " 'token_type_ids': 0\n",
  76. " }\n",
  77. " self.n_tokens = n_tokens\n",
  78. " self.mode = mode\n",
  79. " self.prompt_place = prompt_place\n",
  80. " \n",
  81. " self.sadcl_learned_embedding = nn.parameter.Parameter(\n",
  82. " initialize_embedding(\n",
  83. " emb_dim,\n",
  84. " n_tokens,\n",
  85. " random_range,\n",
  86. " initialize_from\n",
  87. " )\n",
  88. " )\n",
  89. "\n",
  90. " assert self.sadcl_learned_embedding.shape == (n_tokens, emb_dim)\n",
  91. " \n",
  92. " def forward(self, input_embedding):\n",
  93. " # input_embedding.shape = (batch_size, num_of_input_tokens, emb_dim)\n",
  94. " batch_size = input_embedding.size(0)\n",
  95. " if self.mode == 'cat':\n",
  96. " learned_embedding = self.sadcl_learned_embedding.repeat(batch_size, 1, 1) # (batch_size, n_tokens, emb_dim)\n",
  97. " return self.concat_batch(input_embedding[self.get_slice_for_cat()], learned_embedding)\n",
  98. " else: # mode == add\n",
  99. " input_embedding[self.get_slice_for_add()] += self.sadcl_learned_embedding[None, :, :]\n",
  100. " return input_embedding\n",
  101. " \n",
  102. " def get_weights(self):\n",
  103. " return self.sadcl_learned_embedding.detach().clone()\n",
  104. " \n",
  105. " def set_weights(self, new_weights: torch.Tensor):\n",
  106. " self.sadcl_learned_embedding.data = new_weights\n",
  107. " \n",
  108. " def get_slice_for_add(self):\n",
  109. " if self.prompt_place == 'pre':\n",
  110. " return slice(None), slice(None, self.n_tokens), slice(None)\n",
  111. " else: # prompt_place == post\n",
  112. " return slice(None), slice(-self.n_tokens, None), slice(None)\n",
  113. " \n",
  114. " def get_slice_for_cat(self):\n",
  115. " if self.prompt_place == 'pre':\n",
  116. " return slice(None), slice(self.n_tokens, None), slice(None)\n",
  117. " else: # prompt_place == post\n",
  118. " return slice(None), slice(None, -self.n_tokens), slice(None)\n",
  119. " \n",
  120. " def concat_batch(self, orig_vals, new_vals):\n",
  121. " if self.prompt_place == 'pre':\n",
  122. " return torch.cat([new_vals, orig_vals], axis=1)\n",
  123. " else: # prompt_place == post\n",
  124. " return torch.cat([orig_vals, new_vals], axis=1)\n",
  125. " \n",
  126. " def post_tokenizer(self, **kwargs):\n",
  127. " for special_key, pad_val in self.post_tokenizer_map.items():\n",
  128. " if special_key in kwargs:\n",
  129. " orig_tokens = kwargs[special_key]\n",
  130. " batch_size = kwargs[special_key].size(0)\n",
  131. " new_vals = torch.full(\n",
  132. " size=(batch_size, self.n_tokens),\n",
  133. " fill_value=pad_val,\n",
  134. " dtype=orig_tokens.dtype,\n",
  135. " device=orig_tokens.device\n",
  136. " )\n",
  137. " kwargs[special_key].data = self.concat_batch(orig_tokens, new_vals)\n",
  138. " return kwargs\n",
  139. "\n",
  140. "class TransformerInjector(nn.Module):\n",
  141. " def __init__(self, module):\n",
  142. " super().__init__()\n",
  143. " self.original_module = module\n",
  144. " self.add_prompt = SoftEmbedding(\n",
  145. " emb_dim=module.output.dense.out_features,\n",
  146. " n_tokens=NTOKENS,\n",
  147. " prompt_place=PROMPT_PLACE,\n",
  148. " mode='add'\n",
  149. " )\n",
  150. " \n",
  151. " def forward(self, hidden_states, *args, **kwargs):\n",
  152. " hidden_states = self.add_prompt(hidden_states)\n",
  153. " return self.original_module(hidden_states, *args, **kwargs)\n",
  154. " \n",
  155. " @classmethod\n",
  156. " def muatate_list(cls, module_list):\n",
  157. " for idx, module in enumerate(module_list):\n",
  158. " module_list[idx] = cls(module)\n",
  159. " return module_list\n",
  160. " \n",
  161. "class NewEmbeddingLayer(nn.Module):\n",
  162. " def __init__(self, emb_layer=nn.Embedding):\n",
  163. " super().__init__()\n",
  164. " self.emb_layer = emb_layer\n",
  165. " self.soft_prompt = SoftEmbedding(\n",
  166. " emb_dim=emb_layer.weight.size(1),\n",
  167. " n_tokens=NTOKENS,\n",
  168. " prompt_place=PROMPT_PLACE\n",
  169. " )\n",
  170. " \n",
  171. " def forward(self, tokens):\n",
  172. " out = self.emb_layer(tokens)\n",
  173. " out = self.soft_prompt(out)\n",
  174. " return out\n",
  175. " \n",
  176. " def get_weights(self):\n",
  177. " return self.soft_prompt.get_weights()\n",
  178. " \n",
  179. " def set_weights(self, new_weights):\n",
  180. " self.soft_prompt.set_weights(new_weights)\n",
  181. " \n",
  182. " @classmethod\n",
  183. " def mutate(cls, model):\n",
  184. " emb_layer = model.get_input_embeddings()\n",
  185. " new_emb_layer = cls(emb_layer)\n",
  186. " model.set_input_embeddings(new_emb_layer)\n",
  187. " \n",
  188. " orig_forward = model.forward\n",
  189. " \n",
  190. " def new_forward(**kwargs):\n",
  191. " new_kwargs = new_emb_layer.soft_prompt.post_tokenizer(**kwargs)\n",
  192. " return orig_forward(**new_kwargs)\n",
  193. " \n",
  194. " model.forward = new_forward\n",
  195. " return new_emb_layer"
  196. ]
  197. },
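{
"cell_type": "markdown",
"id": "soft-embedding-demo-md",
"metadata": {},
"source": [
"Quick sanity check (added for illustration; the toy sizes are assumptions, not part of the original run). In `cat` mode with `prompt_place='post'`, `SoftEmbedding` drops the last `n_tokens` positions of the input and concatenates the learned prompt there, so the output keeps the input's shape; `post_tokenizer` is what appends the matching placeholder token ids beforehand."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "soft-embedding-demo",
"metadata": {},
"outputs": [],
"source": [
"demo = SoftEmbedding(emb_dim=8, n_tokens=2, prompt_place='post', mode='cat')\n",
"x = torch.zeros(3, 5, 8)  # (batch_size, num_of_input_tokens, emb_dim)\n",
"y = demo(x)\n",
"print(y.shape)  # torch.Size([3, 5, 8]): the last 2 positions now hold the prompt\n",
"print(torch.equal(y[:, -2:, :], demo.sadcl_learned_embedding.expand(3, -1, -1)))  # True"
]
},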
  198. {
  199. "cell_type": "code",
  200. "execution_count": 7,
  201. "id": "79bf6687-5a88-4181-88dc-740d11dd89ac",
  202. "metadata": {
  203. "tags": []
  204. },
  205. "outputs": [
  206. {
  207. "name": "stderr",
  208. "output_type": "stream",
  209. "text": [
  210. "Some weights of BertForSequenceClassification were not initialized from the model checkpoint at bert-base-uncased and are newly initialized: ['classifier.bias', 'classifier.weight']\n",
  211. "You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.\n"
  212. ]
  213. }
  214. ],
  215. "source": [
  216. "from transformers import BertForSequenceClassification, BertTokenizerFast\n",
  217. "\n",
  218. "model = BertForSequenceClassification.from_pretrained(MODEL_NAME)\n",
  219. "tokenizer = BertTokenizerFast.from_pretrained(MODEL_NAME)\n",
  220. "\n",
  221. "peft_module = NewEmbeddingLayer.mutate(model)\n",
  222. "peft_bert_layers = TransformerInjector.muatate_list(model.bert.encoder.layer)\n",
  223. "\n",
  224. "model.to(DEVICE);"
  225. ]
  226. },
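{
"cell_type": "markdown",
"id": "mutation-check-md",
"metadata": {},
"source": [
"A quick check (illustrative sketch, assuming the cells above ran in order): after `NewEmbeddingLayer.mutate` and `TransformerInjector.mutate_list`, the input embedding is wrapped and every encoder layer carries its own additive prompt, giving 13 `sadcl`-namespaced parameters in total."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "mutation-check",
"metadata": {},
"outputs": [],
"source": [
"print(type(model.bert.embeddings.word_embeddings).__name__)  # NewEmbeddingLayer\n",
"print(type(model.bert.encoder.layer[0]).__name__)  # TransformerInjector\n",
"print(sum(1 for name, _ in model.named_parameters() if NAMESPACE in name))  # 13"
]
},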
  227. {
  228. "cell_type": "code",
  229. "execution_count": 23,
  230. "id": "c0b0a48e-0b0b-43de-ae78-d2521cfee69e",
  231. "metadata": {
  232. "tags": []
  233. },
  234. "outputs": [
  235. {
  236. "data": {
  237. "text/plain": [
  238. "tensor([[-0.2546, -0.0352, -0.4110, ..., 0.0189, 0.4121, 0.2206],\n",
  239. " [ 0.0670, 0.0600, 0.4493, ..., -0.4346, 0.4130, -0.3507],\n",
  240. " [ 0.0827, 0.3569, 0.0943, ..., -0.3451, -0.1879, 0.0831],\n",
  241. " ...,\n",
  242. " [-0.0489, -0.2570, -0.3328, ..., -0.4109, 0.0884, -0.0290],\n",
  243. " [-0.2705, -0.3854, 0.4559, ..., -0.0480, -0.4039, 0.4245],\n",
  244. " [-0.1941, 0.2237, 0.3494, ..., -0.1199, -0.3030, -0.1530]],\n",
  245. " device='cuda:0')"
  246. ]
  247. },
  248. "execution_count": 23,
  249. "metadata": {},
  250. "output_type": "execute_result"
  251. }
  252. ],
  253. "source": [
  254. "old_w = peft_module.get_weights()\n",
  255. "old_w"
  256. ]
  257. },
  258. {
  259. "cell_type": "code",
  260. "execution_count": 24,
  261. "id": "d3753569-c95f-4f8e-99ec-e6f990ec55a8",
  262. "metadata": {
  263. "tags": []
  264. },
  265. "outputs": [],
  266. "source": [
  267. "# tokens = tokenizer(\"Hi bye\", return_tensors='pt').to(DEVICE)\n",
  268. "\n",
  269. "# model.eval()\n",
  270. "# with torch.no_grad():\n",
  271. "# out = model(**tokens)\n",
  272. "# out"
  273. ]
  274. },
  275. {
  276. "cell_type": "code",
  277. "execution_count": 3,
  278. "id": "23b6f5b1-bbb7-43b9-b5a9-e62d313f4244",
  279. "metadata": {
  280. "tags": []
  281. },
  282. "outputs": [
  283. {
  284. "name": "stderr",
  285. "output_type": "stream",
  286. "text": [
  287. "Found cached dataset glue (/home/mohalisad/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad)\n"
  288. ]
  289. },
  290. {
  291. "data": {
  292. "application/vnd.jupyter.widget-view+json": {
  293. "model_id": "e2bc8f1df0934619941ae8e37e2be807",
  294. "version_major": 2,
  295. "version_minor": 0
  296. },
  297. "text/plain": [
  298. " 0%| | 0/3 [00:00<?, ?it/s]"
  299. ]
  300. },
  301. "metadata": {},
  302. "output_type": "display_data"
  303. }
  304. ],
  305. "source": [
  306. "from _datasets import AutoLoad\n",
  307. "autoload = AutoLoad()"
  308. ]
  309. },
  310. {
  311. "cell_type": "code",
  312. "execution_count": 26,
  313. "id": "45cb37be-9aee-45f6-8a8a-bf859197a7d4",
  314. "metadata": {
  315. "tags": []
  316. },
  317. "outputs": [
  318. {
  319. "name": "stdout",
  320. "output_type": "stream",
  321. "text": [
  322. "bert.embeddings.word_embeddings.soft_prompt.sadcl_learned_embedding\n",
  323. "bert.encoder.layer.0.add_prompt.sadcl_learned_embedding\n",
  324. "bert.encoder.layer.1.add_prompt.sadcl_learned_embedding\n",
  325. "bert.encoder.layer.2.add_prompt.sadcl_learned_embedding\n",
  326. "bert.encoder.layer.3.add_prompt.sadcl_learned_embedding\n",
  327. "bert.encoder.layer.4.add_prompt.sadcl_learned_embedding\n",
  328. "bert.encoder.layer.5.add_prompt.sadcl_learned_embedding\n",
  329. "bert.encoder.layer.6.add_prompt.sadcl_learned_embedding\n",
  330. "bert.encoder.layer.7.add_prompt.sadcl_learned_embedding\n",
  331. "bert.encoder.layer.8.add_prompt.sadcl_learned_embedding\n",
  332. "bert.encoder.layer.9.add_prompt.sadcl_learned_embedding\n",
  333. "bert.encoder.layer.10.add_prompt.sadcl_learned_embedding\n",
  334. "bert.encoder.layer.11.add_prompt.sadcl_learned_embedding\n",
  335. "classifier.weight\n",
  336. "classifier.bias\n"
  337. ]
  338. }
  339. ],
  340. "source": [
  341. "for param_name, weights in model.named_parameters():\n",
  342. " if 'classifier' in param_name or NAMESPACE in param_name:\n",
  343. " weights.requires_grad = True\n",
  344. " print(param_name)\n",
  345. " else:\n",
  346. " weights.requires_grad = False"
  347. ]
  348. },
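{
"cell_type": "markdown",
"id": "trainable-count-md",
"metadata": {},
"source": [
"For context, a small added sketch: with everything frozen except the prompt embeddings and the classifier head, only a tiny fraction of BERT-base remains trainable."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "trainable-count",
"metadata": {},
"outputs": [],
"source": [
"trainable = sum(p.numel() for p in model.parameters() if p.requires_grad)\n",
"total = sum(p.numel() for p in model.parameters())\n",
"print(f'{trainable:,} / {total:,} trainable ({100 * trainable / total:.3f}%)')"
]
},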
  349. {
  350. "cell_type": "code",
  351. "execution_count": 8,
  352. "id": "47f78a61-710f-410d-8f49-19da12eef09a",
  353. "metadata": {
  354. "tags": []
  355. },
  356. "outputs": [
  357. {
  358. "data": {
  359. "application/vnd.jupyter.widget-view+json": {
  360. "model_id": "",
  361. "version_major": 2,
  362. "version_minor": 0
  363. },
  364. "text/plain": [
  365. "Map: 0%| | 0/8551 [00:00<?, ? examples/s]"
  366. ]
  367. },
  368. "metadata": {},
  369. "output_type": "display_data"
  370. },
  371. {
  372. "data": {
  373. "application/vnd.jupyter.widget-view+json": {
  374. "model_id": "",
  375. "version_major": 2,
  376. "version_minor": 0
  377. },
  378. "text/plain": [
  379. "Map: 0%| | 0/1043 [00:00<?, ? examples/s]"
  380. ]
  381. },
  382. "metadata": {},
  383. "output_type": "display_data"
  384. },
  385. {
  386. "data": {
  387. "application/vnd.jupyter.widget-view+json": {
  388. "model_id": "",
  389. "version_major": 2,
  390. "version_minor": 0
  391. },
  392. "text/plain": [
  393. "Map: 0%| | 0/1063 [00:00<?, ? examples/s]"
  394. ]
  395. },
  396. "metadata": {},
  397. "output_type": "display_data"
  398. }
  399. ],
  400. "source": [
  401. "loader_out = autoload.get_and_map(tokenizer, \"glue:cola\")\n"
  402. ]
  403. },
  404. {
  405. "cell_type": "code",
  406. "execution_count": 9,
  407. "id": "8d75737f-e5c6-4dc1-94b9-8aaa507648e2",
  408. "metadata": {
  409. "tags": []
  410. },
  411. "outputs": [
  412. {
  413. "data": {
  414. "text/plain": [
  415. "{'train': Dataset({\n",
  416. " features: ['sentence', 'label', 'idx', 'input_ids', 'token_type_ids', 'attention_mask'],\n",
  417. " num_rows: 8551\n",
  418. " }),\n",
  419. " 'valid': Dataset({\n",
  420. " features: ['sentence', 'label', 'idx', 'input_ids', 'token_type_ids', 'attention_mask'],\n",
  421. " num_rows: 1043\n",
  422. " }),\n",
  423. " 'output': {'kind': 'classification', 'range': {0, 1}}}"
  424. ]
  425. },
  426. "execution_count": 9,
  427. "metadata": {},
  428. "output_type": "execute_result"
  429. }
  430. ],
  431. "source": [
  432. "loader_out"
  433. ]
  434. },
  435. {
  436. "cell_type": "code",
  437. "execution_count": 28,
  438. "id": "2489364c-4d8d-4d69-8d52-7ac88d66e7f8",
  439. "metadata": {
  440. "tags": []
  441. },
  442. "outputs": [],
  443. "source": [
  444. "from config import load_config\n",
  445. "config = load_config('config.yaml')"
  446. ]
  447. },
  448. {
  449. "cell_type": "code",
  450. "execution_count": 29,
  451. "id": "67e68e28-b4d0-42fd-a7e7-b1321485fc78",
  452. "metadata": {
  453. "tags": []
  454. },
  455. "outputs": [
  456. {
  457. "name": "stderr",
  458. "output_type": "stream",
  459. "text": [
  460. "Loading cached processed dataset at /home/mohalisad/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad/cache-41a6799222324b5f.arrow\n",
  461. "Loading cached processed dataset at /home/mohalisad/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad/cache-9fc7d7deaf3161a2.arrow\n",
  462. "Loading cached processed dataset at /home/mohalisad/.cache/huggingface/datasets/glue/cola/1.0.0/dacbe3125aa31d7f70367a07a8a9e72a5a0bfeb5fc42e75c9db75b96da6053ad/cache-0eb862d54758b38d.arrow\n",
  463. "You're using a BertTokenizerFast tokenizer. Please note that with a fast tokenizer, using the `__call__` method is faster than using a method to encode the text followed by a call to the `pad` method to get a padded encoding.\n"
  464. ]
  465. },
  466. {
  467. "data": {
  468. "text/html": [
  469. "\n",
  470. " <div>\n",
  471. " \n",
  472. " <progress value='21440' max='21440' style='width:300px; height:20px; vertical-align: middle;'></progress>\n",
  473. " [21440/21440 07:01, Epoch 80/80]\n",
  474. " </div>\n",
  475. " <table border=\"1\" class=\"dataframe\">\n",
  476. " <thead>\n",
  477. " <tr style=\"text-align: left;\">\n",
  478. " <th>Epoch</th>\n",
  479. " <th>Training Loss</th>\n",
  480. " <th>Validation Loss</th>\n",
  481. " <th>Accuracy</th>\n",
  482. " <th>F1-score-1</th>\n",
  483. " <th>F1-score-ma</th>\n",
  484. " </tr>\n",
  485. " </thead>\n",
  486. " <tbody>\n",
  487. " <tr>\n",
  488. " <td>1</td>\n",
  489. " <td>No log</td>\n",
  490. " <td>0.655867</td>\n",
  491. " <td>0.691275</td>\n",
  492. " <td>0.817460</td>\n",
  493. " <td>0.408730</td>\n",
  494. " </tr>\n",
  495. " <tr>\n",
  496. " <td>2</td>\n",
  497. " <td>0.577800</td>\n",
  498. " <td>0.639771</td>\n",
  499. " <td>0.763183</td>\n",
  500. " <td>0.848930</td>\n",
  501. " <td>0.650629</td>\n",
  502. " </tr>\n",
  503. " <tr>\n",
  504. " <td>3</td>\n",
  505. " <td>0.577800</td>\n",
  506. " <td>0.507809</td>\n",
  507. " <td>0.766059</td>\n",
  508. " <td>0.849197</td>\n",
  509. " <td>0.663915</td>\n",
  510. " </tr>\n",
  511. " <tr>\n",
  512. " <td>4</td>\n",
  513. " <td>0.528700</td>\n",
  514. " <td>0.523820</td>\n",
  515. " <td>0.770853</td>\n",
  516. " <td>0.852195</td>\n",
  517. " <td>0.671300</td>\n",
  518. " </tr>\n",
  519. " <tr>\n",
  520. " <td>5</td>\n",
  521. " <td>0.528700</td>\n",
  522. " <td>0.480276</td>\n",
  523. " <td>0.794823</td>\n",
  524. " <td>0.861757</td>\n",
  525. " <td>0.731994</td>\n",
  526. " </tr>\n",
  527. " <tr>\n",
  528. " <td>6</td>\n",
  529. " <td>0.499800</td>\n",
  530. " <td>0.506056</td>\n",
  531. " <td>0.776606</td>\n",
  532. " <td>0.855906</td>\n",
  533. " <td>0.679552</td>\n",
  534. " </tr>\n",
  535. " <tr>\n",
  536. " <td>7</td>\n",
  537. " <td>0.499800</td>\n",
  538. " <td>0.475724</td>\n",
  539. " <td>0.795781</td>\n",
  540. " <td>0.863198</td>\n",
  541. " <td>0.730276</td>\n",
  542. " </tr>\n",
  543. " <tr>\n",
  544. " <td>8</td>\n",
  545. " <td>0.482900</td>\n",
  546. " <td>0.494971</td>\n",
  547. " <td>0.790988</td>\n",
  548. " <td>0.860614</td>\n",
  549. " <td>0.721495</td>\n",
  550. " </tr>\n",
  551. " <tr>\n",
  552. " <td>9</td>\n",
  553. " <td>0.482900</td>\n",
  554. " <td>0.478771</td>\n",
  555. " <td>0.786194</td>\n",
  556. " <td>0.858592</td>\n",
  557. " <td>0.710239</td>\n",
  558. " </tr>\n",
  559. " <tr>\n",
  560. " <td>10</td>\n",
  561. " <td>0.465700</td>\n",
  562. " <td>0.502414</td>\n",
  563. " <td>0.780441</td>\n",
  564. " <td>0.858903</td>\n",
  565. " <td>0.682151</td>\n",
  566. " </tr>\n",
  567. " <tr>\n",
  568. " <td>11</td>\n",
  569. " <td>0.465700</td>\n",
  570. " <td>0.498116</td>\n",
  571. " <td>0.794823</td>\n",
  572. " <td>0.866584</td>\n",
  573. " <td>0.711300</td>\n",
  574. " </tr>\n",
  575. " <tr>\n",
  576. " <td>12</td>\n",
  577. " <td>0.461800</td>\n",
  578. " <td>0.537117</td>\n",
  579. " <td>0.780441</td>\n",
  580. " <td>0.860281</td>\n",
  581. " <td>0.673988</td>\n",
  582. " </tr>\n",
  583. " <tr>\n",
  584. " <td>13</td>\n",
  585. " <td>0.461800</td>\n",
  586. " <td>0.465851</td>\n",
  587. " <td>0.802493</td>\n",
  588. " <td>0.868286</td>\n",
  589. " <td>0.736825</td>\n",
  590. " </tr>\n",
  591. " <tr>\n",
  592. " <td>14</td>\n",
  593. " <td>0.445000</td>\n",
  594. " <td>0.487390</td>\n",
  595. " <td>0.795781</td>\n",
  596. " <td>0.865953</td>\n",
  597. " <td>0.718691</td>\n",
  598. " </tr>\n",
  599. " <tr>\n",
  600. " <td>15</td>\n",
  601. " <td>0.435600</td>\n",
  602. " <td>0.440423</td>\n",
  603. " <td>0.801534</td>\n",
  604. " <td>0.864440</td>\n",
  605. " <td>0.747068</td>\n",
  606. " </tr>\n",
  607. " <tr>\n",
  608. " <td>16</td>\n",
  609. " <td>0.435600</td>\n",
  610. " <td>0.483897</td>\n",
  611. " <td>0.803452</td>\n",
  612. " <td>0.869344</td>\n",
  613. " <td>0.736413</td>\n",
  614. " </tr>\n",
  615. " <tr>\n",
  616. " <td>17</td>\n",
  617. " <td>0.423500</td>\n",
  618. " <td>0.461727</td>\n",
  619. " <td>0.806328</td>\n",
  620. " <td>0.872152</td>\n",
  621. " <td>0.736471</td>\n",
  622. " </tr>\n",
  623. " <tr>\n",
  624. " <td>18</td>\n",
  625. " <td>0.423500</td>\n",
  626. " <td>0.491034</td>\n",
  627. " <td>0.794823</td>\n",
  628. " <td>0.865915</td>\n",
  629. " <td>0.714590</td>\n",
  630. " </tr>\n",
  631. " <tr>\n",
  632. " <td>19</td>\n",
  633. " <td>0.410400</td>\n",
  634. " <td>0.451404</td>\n",
  635. " <td>0.806328</td>\n",
  636. " <td>0.868490</td>\n",
  637. " <td>0.750608</td>\n",
  638. " </tr>\n",
  639. " <tr>\n",
  640. " <td>20</td>\n",
  641. " <td>0.410400</td>\n",
  642. " <td>0.439862</td>\n",
  643. " <td>0.808245</td>\n",
  644. " <td>0.872611</td>\n",
  645. " <td>0.742507</td>\n",
  646. " </tr>\n",
  647. " <tr>\n",
  648. " <td>21</td>\n",
  649. " <td>0.408100</td>\n",
  650. " <td>0.443258</td>\n",
  651. " <td>0.794823</td>\n",
  652. " <td>0.865915</td>\n",
  653. " <td>0.714590</td>\n",
  654. " </tr>\n",
  655. " <tr>\n",
  656. " <td>22</td>\n",
  657. " <td>0.408100</td>\n",
  658. " <td>0.450756</td>\n",
  659. " <td>0.805369</td>\n",
  660. " <td>0.871438</td>\n",
  661. " <td>0.735522</td>\n",
  662. " </tr>\n",
  663. " <tr>\n",
  664. " <td>23</td>\n",
  665. " <td>0.404600</td>\n",
  666. " <td>0.483001</td>\n",
  667. " <td>0.797699</td>\n",
  668. " <td>0.867379</td>\n",
  669. " <td>0.720558</td>\n",
  670. " </tr>\n",
  671. " <tr>\n",
  672. " <td>24</td>\n",
  673. " <td>0.404600</td>\n",
  674. " <td>0.481094</td>\n",
  675. " <td>0.794823</td>\n",
  676. " <td>0.866417</td>\n",
  677. " <td>0.712134</td>\n",
  678. " </tr>\n",
  679. " <tr>\n",
  680. " <td>25</td>\n",
  681. " <td>0.397200</td>\n",
  682. " <td>0.509731</td>\n",
  683. " <td>0.798658</td>\n",
  684. " <td>0.867925</td>\n",
  685. " <td>0.722269</td>\n",
  686. " </tr>\n",
  687. " <tr>\n",
  688. " <td>26</td>\n",
  689. " <td>0.397200</td>\n",
  690. " <td>0.468457</td>\n",
  691. " <td>0.813998</td>\n",
  692. " <td>0.872870</td>\n",
  693. " <td>0.763221</td>\n",
  694. " </tr>\n",
  695. " <tr>\n",
  696. " <td>27</td>\n",
  697. " <td>0.388100</td>\n",
  698. " <td>0.450646</td>\n",
  699. " <td>0.802493</td>\n",
  700. " <td>0.869785</td>\n",
  701. " <td>0.730527</td>\n",
  702. " </tr>\n",
  703. " <tr>\n",
  704. " <td>28</td>\n",
  705. " <td>0.379900</td>\n",
  706. " <td>0.518912</td>\n",
  707. " <td>0.800575</td>\n",
  708. " <td>0.868852</td>\n",
  709. " <td>0.726426</td>\n",
  710. " </tr>\n",
  711. " <tr>\n",
  712. " <td>29</td>\n",
  713. " <td>0.379900</td>\n",
  714. " <td>0.474939</td>\n",
  715. " <td>0.803452</td>\n",
  716. " <td>0.870988</td>\n",
  717. " <td>0.729257</td>\n",
  718. " </tr>\n",
  719. " <tr>\n",
  720. " <td>30</td>\n",
  721. " <td>0.375800</td>\n",
  722. " <td>0.468194</td>\n",
  723. " <td>0.799616</td>\n",
  724. " <td>0.868636</td>\n",
  725. " <td>0.723207</td>\n",
  726. " </tr>\n",
  727. " <tr>\n",
  728. " <td>31</td>\n",
  729. " <td>0.375800</td>\n",
  730. " <td>0.447116</td>\n",
  731. " <td>0.810163</td>\n",
  732. " <td>0.872423</td>\n",
  733. " <td>0.750818</td>\n",
  734. " </tr>\n",
  735. " <tr>\n",
  736. " <td>32</td>\n",
  737. " <td>0.370700</td>\n",
  738. " <td>0.537091</td>\n",
  739. " <td>0.802493</td>\n",
  740. " <td>0.870113</td>\n",
  741. " <td>0.729057</td>\n",
  742. " </tr>\n",
  743. " <tr>\n",
  744. " <td>33</td>\n",
  745. " <td>0.370700</td>\n",
  746. " <td>0.475261</td>\n",
  747. " <td>0.807287</td>\n",
  748. " <td>0.871071</td>\n",
  749. " <td>0.744834</td>\n",
  750. " </tr>\n",
  751. " <tr>\n",
  752. " <td>34</td>\n",
  753. " <td>0.367900</td>\n",
  754. " <td>0.487207</td>\n",
  755. " <td>0.802493</td>\n",
  756. " <td>0.870603</td>\n",
  757. " <td>0.726799</td>\n",
  758. " </tr>\n",
  759. " <tr>\n",
  760. " <td>35</td>\n",
  761. " <td>0.367900</td>\n",
  762. " <td>0.437785</td>\n",
  763. " <td>0.806328</td>\n",
  764. " <td>0.871338</td>\n",
  765. " <td>0.739932</td>\n",
  766. " </tr>\n",
  767. " <tr>\n",
  768. " <td>36</td>\n",
  769. " <td>0.358800</td>\n",
  770. " <td>0.508899</td>\n",
  771. " <td>0.808245</td>\n",
  772. " <td>0.872774</td>\n",
  773. " <td>0.741834</td>\n",
  774. " </tr>\n",
  775. " <tr>\n",
  776. " <td>37</td>\n",
  777. " <td>0.358800</td>\n",
  778. " <td>0.552409</td>\n",
  779. " <td>0.800575</td>\n",
  780. " <td>0.869347</td>\n",
  781. " <td>0.724147</td>\n",
  782. " </tr>\n",
  783. " <tr>\n",
  784. " <td>38</td>\n",
  785. " <td>0.355700</td>\n",
  786. " <td>0.496687</td>\n",
  787. " <td>0.802493</td>\n",
  788. " <td>0.871571</td>\n",
  789. " <td>0.722093</td>\n",
  790. " </tr>\n",
  791. " <tr>\n",
  792. " <td>39</td>\n",
  793. " <td>0.355700</td>\n",
  794. " <td>0.504841</td>\n",
  795. " <td>0.816874</td>\n",
  796. " <td>0.875570</td>\n",
  797. " <td>0.764464</td>\n",
  798. " </tr>\n",
  799. " <tr>\n",
  800. " <td>40</td>\n",
  801. " <td>0.345500</td>\n",
  802. " <td>0.483254</td>\n",
  803. " <td>0.790988</td>\n",
  804. " <td>0.865929</td>\n",
  805. " <td>0.696008</td>\n",
  806. " </tr>\n",
  807. " <tr>\n",
  808. " <td>41</td>\n",
  809. " <td>0.345500</td>\n",
  810. " <td>0.512504</td>\n",
  811. " <td>0.796740</td>\n",
  812. " <td>0.868323</td>\n",
  813. " <td>0.711472</td>\n",
  814. " </tr>\n",
  815. " <tr>\n",
  816. " <td>42</td>\n",
  817. " <td>0.351700</td>\n",
  818. " <td>0.497110</td>\n",
  819. " <td>0.800575</td>\n",
  820. " <td>0.870486</td>\n",
  821. " <td>0.718576</td>\n",
  822. " </tr>\n",
  823. " <tr>\n",
  824. " <td>43</td>\n",
  825. " <td>0.339900</td>\n",
  826. " <td>0.471216</td>\n",
  827. " <td>0.798658</td>\n",
  828. " <td>0.867758</td>\n",
  829. " <td>0.723036</td>\n",
  830. " </tr>\n",
  831. " <tr>\n",
  832. " <td>44</td>\n",
  833. " <td>0.339900</td>\n",
  834. " <td>0.531487</td>\n",
  835. " <td>0.805369</td>\n",
  836. " <td>0.870783</td>\n",
  837. " <td>0.738304</td>\n",
  838. " </tr>\n",
  839. " <tr>\n",
  840. " <td>45</td>\n",
  841. " <td>0.341300</td>\n",
  842. " <td>0.540843</td>\n",
  843. " <td>0.807287</td>\n",
  844. " <td>0.870740</td>\n",
  845. " <td>0.746104</td>\n",
  846. " </tr>\n",
  847. " <tr>\n",
  848. " <td>46</td>\n",
  849. " <td>0.341300</td>\n",
  850. " <td>0.476809</td>\n",
  851. " <td>0.803452</td>\n",
  852. " <td>0.869841</td>\n",
  853. " <td>0.734334</td>\n",
  854. " </tr>\n",
  855. " <tr>\n",
  856. " <td>47</td>\n",
  857. " <td>0.337400</td>\n",
  858. " <td>0.479455</td>\n",
  859. " <td>0.819751</td>\n",
  860. " <td>0.877124</td>\n",
  861. " <td>0.769497</td>\n",
  862. " </tr>\n",
  863. " <tr>\n",
  864. " <td>48</td>\n",
  865. " <td>0.337400</td>\n",
  866. " <td>0.446018</td>\n",
  867. " <td>0.815916</td>\n",
  868. " <td>0.875163</td>\n",
  869. " <td>0.762399</td>\n",
  870. " </tr>\n",
  871. " <tr>\n",
  872. " <td>49</td>\n",
  873. " <td>0.334200</td>\n",
  874. " <td>0.548959</td>\n",
  875. " <td>0.813039</td>\n",
  876. " <td>0.875080</td>\n",
  877. " <td>0.751826</td>\n",
  878. " </tr>\n",
  879. " <tr>\n",
  880. " <td>50</td>\n",
  881. " <td>0.334200</td>\n",
  882. " <td>0.500371</td>\n",
  883. " <td>0.797699</td>\n",
  884. " <td>0.867379</td>\n",
  885. " <td>0.720558</td>\n",
  886. " </tr>\n",
  887. " <tr>\n",
  888. " <td>51</td>\n",
  889. " <td>0.331700</td>\n",
  890. " <td>0.503151</td>\n",
  891. " <td>0.808245</td>\n",
  892. " <td>0.871134</td>\n",
  893. " <td>0.748301</td>\n",
  894. " </tr>\n",
  895. " <tr>\n",
  896. " <td>52</td>\n",
  897. " <td>0.331700</td>\n",
  898. " <td>0.556216</td>\n",
  899. " <td>0.798658</td>\n",
  900. " <td>0.868586</td>\n",
  901. " <td>0.719129</td>\n",
  902. " </tr>\n",
  903. " <tr>\n",
  904. " <td>53</td>\n",
  905. " <td>0.326700</td>\n",
  906. " <td>0.478857</td>\n",
  907. " <td>0.816874</td>\n",
  908. " <td>0.875245</td>\n",
  909. " <td>0.765550</td>\n",
  910. " </tr>\n",
  911. " <tr>\n",
  912. " <td>54</td>\n",
  913. " <td>0.326700</td>\n",
  914. " <td>0.508674</td>\n",
  915. " <td>0.806328</td>\n",
  916. " <td>0.870347</td>\n",
  917. " <td>0.743885</td>\n",
  918. " </tr>\n",
  919. " <tr>\n",
  920. " <td>55</td>\n",
  921. " <td>0.326700</td>\n",
  922. " <td>0.510241</td>\n",
  923. " <td>0.807287</td>\n",
  924. " <td>0.870740</td>\n",
  925. " <td>0.746104</td>\n",
  926. " </tr>\n",
  927. " <tr>\n",
  928. " <td>56</td>\n",
  929. " <td>0.327800</td>\n",
  930. " <td>0.510437</td>\n",
  931. " <td>0.803452</td>\n",
  932. " <td>0.870335</td>\n",
  933. " <td>0.732197</td>\n",
  934. " </tr>\n",
  935. " <tr>\n",
  936. " <td>57</td>\n",
  937. " <td>0.327800</td>\n",
  938. " <td>0.516560</td>\n",
  939. " <td>0.804410</td>\n",
  940. " <td>0.871212</td>\n",
  941. " <td>0.732419</td>\n",
  942. " </tr>\n",
  943. " <tr>\n",
  944. " <td>58</td>\n",
  945. " <td>0.320600</td>\n",
  946. " <td>0.482175</td>\n",
  947. " <td>0.810163</td>\n",
  948. " <td>0.872258</td>\n",
  949. " <td>0.751428</td>\n",
  950. " </tr>\n",
  951. " <tr>\n",
  952. " <td>59</td>\n",
  953. " <td>0.320600</td>\n",
  954. " <td>0.534551</td>\n",
  955. " <td>0.809204</td>\n",
  956. " <td>0.870527</td>\n",
  957. " <td>0.754025</td>\n",
  958. " </tr>\n",
  959. " <tr>\n",
  960. " <td>60</td>\n",
  961. " <td>0.311600</td>\n",
  962. " <td>0.529513</td>\n",
  963. " <td>0.804410</td>\n",
  964. " <td>0.869063</td>\n",
  965. " <td>0.741350</td>\n",
  966. " </tr>\n",
  967. " <tr>\n",
  968. " <td>61</td>\n",
  969. " <td>0.311600</td>\n",
  970. " <td>0.529038</td>\n",
  971. " <td>0.812081</td>\n",
  972. " <td>0.872892</td>\n",
  973. " <td>0.756299</td>\n",
  974. " </tr>\n",
  975. " <tr>\n",
  976. " <td>62</td>\n",
  977. " <td>0.317900</td>\n",
  978. " <td>0.551885</td>\n",
  979. " <td>0.797699</td>\n",
  980. " <td>0.866032</td>\n",
  981. " <td>0.726558</td>\n",
  982. " </tr>\n",
  983. " <tr>\n",
  984. " <td>63</td>\n",
  985. " <td>0.317900</td>\n",
  986. " <td>0.500419</td>\n",
  987. " <td>0.808245</td>\n",
  988. " <td>0.870968</td>\n",
  989. " <td>0.748917</td>\n",
  990. " </tr>\n",
  991. " <tr>\n",
  992. " <td>64</td>\n",
  993. " <td>0.315100</td>\n",
  994. " <td>0.466086</td>\n",
  995. " <td>0.809204</td>\n",
  996. " <td>0.871861</td>\n",
  997. " <td>0.749251</td>\n",
  998. " </tr>\n",
  999. " <tr>\n",
  1000. " <td>65</td>\n",
  1001. " <td>0.315100</td>\n",
  1002. " <td>0.492729</td>\n",
  1003. " <td>0.811122</td>\n",
  1004. " <td>0.872821</td>\n",
  1005. " <td>0.752984</td>\n",
  1006. " </tr>\n",
  1007. " <tr>\n",
  1008. " <td>66</td>\n",
  1009. " <td>0.306300</td>\n",
  1010. " <td>0.463267</td>\n",
  1011. " <td>0.813998</td>\n",
  1012. " <td>0.874352</td>\n",
  1013. " <td>0.758209</td>\n",
  1014. " </tr>\n",
  1015. " <tr>\n",
  1016. " <td>67</td>\n",
  1017. " <td>0.306300</td>\n",
  1018. " <td>0.568536</td>\n",
  1019. " <td>0.811122</td>\n",
  1020. " <td>0.872821</td>\n",
  1021. " <td>0.752984</td>\n",
  1022. " </tr>\n",
  1023. " <tr>\n",
  1024. " <td>68</td>\n",
  1025. " <td>0.308500</td>\n",
  1026. " <td>0.539011</td>\n",
  1027. " <td>0.803452</td>\n",
  1028. " <td>0.868167</td>\n",
  1029. " <td>0.741052</td>\n",
  1030. " </tr>\n",
  1031. " <tr>\n",
  1032. " <td>69</td>\n",
  1033. " <td>0.308500</td>\n",
  1034. " <td>0.526197</td>\n",
  1035. " <td>0.808245</td>\n",
  1036. " <td>0.871300</td>\n",
  1037. " <td>0.747680</td>\n",
  1038. " </tr>\n",
  1039. " <tr>\n",
  1040. " <td>70</td>\n",
  1041. " <td>0.304900</td>\n",
  1042. " <td>0.506041</td>\n",
  1043. " <td>0.811122</td>\n",
  1044. " <td>0.872657</td>\n",
  1045. " <td>0.753583</td>\n",
  1046. " </tr>\n",
  1047. " <tr>\n",
  1048. " <td>71</td>\n",
  1049. " <td>0.302700</td>\n",
  1050. " <td>0.581929</td>\n",
  1051. " <td>0.798658</td>\n",
  1052. " <td>0.866751</td>\n",
  1053. " <td>0.727493</td>\n",
  1054. " </tr>\n",
  1055. " <tr>\n",
  1056. " <td>72</td>\n",
  1057. " <td>0.302700</td>\n",
  1058. " <td>0.516497</td>\n",
  1059. " <td>0.810163</td>\n",
  1060. " <td>0.872258</td>\n",
  1061. " <td>0.751428</td>\n",
  1062. " </tr>\n",
  1063. " <tr>\n",
  1064. " <td>73</td>\n",
  1065. " <td>0.308000</td>\n",
  1066. " <td>0.507128</td>\n",
  1067. " <td>0.807287</td>\n",
  1068. " <td>0.870239</td>\n",
  1069. " <td>0.747969</td>\n",
  1070. " </tr>\n",
  1071. " <tr>\n",
  1072. " <td>74</td>\n",
  1073. " <td>0.308000</td>\n",
  1074. " <td>0.520996</td>\n",
  1075. " <td>0.803452</td>\n",
  1076. " <td>0.868167</td>\n",
  1077. " <td>0.741052</td>\n",
  1078. " </tr>\n",
  1079. " <tr>\n",
  1080. " <td>75</td>\n",
  1081. " <td>0.304900</td>\n",
  1082. " <td>0.517548</td>\n",
  1083. " <td>0.806328</td>\n",
  1084. " <td>0.869677</td>\n",
  1085. " <td>0.746406</td>\n",
  1086. " </tr>\n",
  1087. " <tr>\n",
  1088. " <td>76</td>\n",
  1089. " <td>0.304900</td>\n",
  1090. " <td>0.503817</td>\n",
  1091. " <td>0.804410</td>\n",
  1092. " <td>0.868726</td>\n",
  1093. " <td>0.742634</td>\n",
  1094. " </tr>\n",
  1095. " <tr>\n",
  1096. " <td>77</td>\n",
  1097. " <td>0.298100</td>\n",
  1098. " <td>0.508880</td>\n",
  1099. " <td>0.809204</td>\n",
  1100. " <td>0.871530</td>\n",
  1101. " <td>0.750476</td>\n",
  1102. " </tr>\n",
  1103. " <tr>\n",
  1104. " <td>78</td>\n",
  1105. " <td>0.298100</td>\n",
  1106. " <td>0.505606</td>\n",
  1107. " <td>0.808245</td>\n",
  1108. " <td>0.870801</td>\n",
  1109. " <td>0.749527</td>\n",
  1110. " </tr>\n",
  1111. " <tr>\n",
  1112. " <td>79</td>\n",
  1113. " <td>0.304900</td>\n",
  1114. " <td>0.526573</td>\n",
  1115. " <td>0.802493</td>\n",
  1116. " <td>0.867609</td>\n",
  1117. " <td>0.739465</td>\n",
  1118. " </tr>\n",
  1119. " <tr>\n",
  1120. " <td>80</td>\n",
  1121. " <td>0.304900</td>\n",
  1122. " <td>0.523581</td>\n",
  1123. " <td>0.804410</td>\n",
  1124. " <td>0.868726</td>\n",
  1125. " <td>0.742634</td>\n",
  1126. " </tr>\n",
  1127. " </tbody>\n",
  1128. "</table><p>"
  1129. ],
  1130. "text/plain": [
  1131. "<IPython.core.display.HTML object>"
  1132. ]
  1133. },
  1134. "metadata": {},
  1135. "output_type": "display_data"
  1136. },
  1137. {
  1138. "name": "stderr",
  1139. "output_type": "stream",
  1140. "text": [
  1141. "/home/mohalisad/anaconda3/envs/deep/lib/python3.10/site-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
  1142. " _warn_prf(average, modifier, msg_start, len(result))\n",
  1143. "/home/mohalisad/anaconda3/envs/deep/lib/python3.10/site-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
  1144. " _warn_prf(average, modifier, msg_start, len(result))\n",
  1145. "/home/mohalisad/anaconda3/envs/deep/lib/python3.10/site-packages/sklearn/metrics/_classification.py:1344: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
  1146. " _warn_prf(average, modifier, msg_start, len(result))\n"
  1147. ]
  1148. },
  1149. {
  1150. "data": {
  1151. "text/plain": [
  1152. "TrainOutput(global_step=21440, training_loss=0.37007259682043275, metrics={'train_runtime': 421.6464, 'train_samples_per_second': 1622.402, 'train_steps_per_second': 50.848, 'total_flos': 8141300538608160.0, 'train_loss': 0.37007259682043275, 'epoch': 80.0})"
  1153. ]
  1154. },
  1155. "execution_count": 29,
  1156. "metadata": {},
  1157. "output_type": "execute_result"
  1158. }
  1159. ],
  1160. "source": [
  1161. "from transformers import TrainingArguments, Trainer, DataCollatorWithPadding\n",
  1162. "from sklearn.metrics import classification_report\n",
  1163. "\n",
  1164. "\n",
  1165. "def compute_metrics(pred):\n",
  1166. " true_labels = pred.label_ids.ravel()\n",
  1167. " pred_labels = pred.predictions.argmax(-1).ravel()\n",
  1168. " report = classification_report(true_labels, pred_labels, output_dict=True)\n",
  1169. " return {\n",
  1170. " 'accuracy': report['accuracy'],\n",
  1171. " 'f1-score-1': report['1']['f1-score'],\n",
  1172. " 'f1-score-ma': report['macro avg']['f1-score']\n",
  1173. " }\n",
  1174. "\n",
  1175. "\n",
  1176. "# def train_model(input_model, task_name, train_dataset, eval_dataset, col_fn):\n",
  1177. "# training_args = TrainingArguments(\n",
  1178. "# evaluation_strategy=\"epoch\",\n",
  1179. "# save_strategy=\"epoch\",\n",
  1180. "# # The next 2 lines are important to ensure the dataset labels are properly passed to the model\n",
  1181. "# remove_unused_columns=False,\n",
  1182. "# **config.hf_trainer_params.to_dict()\n",
  1183. "# )\n",
  1184. "\n",
  1185. "# trainer = Trainer(\n",
  1186. "# model=input_model,\n",
  1187. "# args=training_args,\n",
  1188. "# train_dataset=train_dataset,\n",
  1189. "# eval_dataset=eval_dataset,\n",
  1190. "# data_collator=col_fn,\n",
  1191. "# compute_metrics=compute_metrics\n",
  1192. "# )\n",
  1193. "# trainer.train()\n",
  1194. "\n",
  1195. "col_fn = DataCollatorWithPadding(\n",
  1196. " tokenizer, return_tensors='pt', padding='longest'\n",
  1197. ")\n",
  1198. "\n",
  1199. "loader_out = autoload.get_and_map(tokenizer, \"glue:cola\")\n",
  1200. "num_labels = len(loader_out['output']['range'])\n",
  1201. "\n",
  1202. "training_args = TrainingArguments(\n",
  1203. " evaluation_strategy=\"epoch\",\n",
  1204. " save_strategy=\"epoch\",\n",
  1205. " # The next 2 lines are important to ensure the dataset labels are properly passed to the model\n",
  1206. " remove_unused_columns=False,\n",
  1207. " **config.hf_trainer_params.to_dict()\n",
  1208. ")\n",
  1209. "\n",
  1210. "trainer = Trainer(\n",
  1211. " model=model,\n",
  1212. " args=training_args,\n",
  1213. " train_dataset=loader_out['train'],\n",
  1214. " eval_dataset=loader_out['valid'],\n",
  1215. " data_collator=col_fn,\n",
  1216. " compute_metrics=compute_metrics\n",
  1217. ")\n",
  1218. "trainer.train()"
  1219. ]
  1220. },
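{
"cell_type": "markdown",
"id": "compute-metrics-demo-md",
"metadata": {},
"source": [
"To make the metric wiring concrete, `compute_metrics` can be exercised on a hand-made prediction object (fabricated values, for illustration only): with labels `[0, 1, 1, 0]` and argmax predictions `[0, 1, 0, 0]`, accuracy is 3/4 and the class-1 F1 is 2/3."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "compute-metrics-demo",
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"fake_pred = SimpleNamespace(\n",
"    label_ids=np.array([0, 1, 1, 0]),\n",
"    predictions=np.array([[0.9, 0.1], [0.2, 0.8], [0.6, 0.4], [0.7, 0.3]])\n",
")\n",
"compute_metrics(fake_pred)  # {'accuracy': 0.75, 'f1-score-1': 0.667, 'f1-score-ma': 0.733}"
]
},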
  1221. {
  1222. "cell_type": "code",
  1223. "execution_count": 72,
  1224. "id": "00bc804c-6133-4ccc-b6c4-697d859f94cf",
  1225. "metadata": {
  1226. "tags": []
  1227. },
  1228. "outputs": [
  1229. {
  1230. "data": {
  1231. "text/plain": [
  1232. "BertLayer(\n",
  1233. " (attention): BertAttention(\n",
  1234. " (self): BertSelfAttention(\n",
  1235. " (query): Linear(in_features=768, out_features=768, bias=True)\n",
  1236. " (key): Linear(in_features=768, out_features=768, bias=True)\n",
  1237. " (value): Linear(in_features=768, out_features=768, bias=True)\n",
  1238. " (dropout): Dropout(p=0.1, inplace=False)\n",
  1239. " )\n",
  1240. " (output): BertSelfOutput(\n",
  1241. " (dense): Linear(in_features=768, out_features=768, bias=True)\n",
  1242. " (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
  1243. " (dropout): Dropout(p=0.1, inplace=False)\n",
  1244. " )\n",
  1245. " )\n",
  1246. " (intermediate): BertIntermediate(\n",
  1247. " (dense): Linear(in_features=768, out_features=3072, bias=True)\n",
  1248. " (intermediate_act_fn): GELUActivation()\n",
  1249. " )\n",
  1250. " (output): BertOutput(\n",
  1251. " (dense): Linear(in_features=3072, out_features=768, bias=True)\n",
  1252. " (LayerNorm): LayerNorm((768,), eps=1e-12, elementwise_affine=True)\n",
  1253. " (dropout): Dropout(p=0.1, inplace=False)\n",
  1254. " )\n",
  1255. ")"
  1256. ]
  1257. },
  1258. "execution_count": 72,
  1259. "metadata": {},
  1260. "output_type": "execute_result"
  1261. }
  1262. ],
  1263. "source": [
  1264. "model.bert.encoder.layer[0]"
  1265. ]
  1266. },
  1267. {
  1268. "cell_type": "code",
  1269. "execution_count": null,
  1270. "id": "ad825227-f073-4d58-9b06-15dc882e2f74",
  1271. "metadata": {
  1272. "tags": []
  1273. },
  1274. "outputs": [],
  1275. "source": [
  1276. "tokenizer(\"hi bye\", return_tensors='pt')"
  1277. ]
  1278. },
  1279. {
  1280. "cell_type": "code",
  1281. "execution_count": null,
  1282. "id": "a015bcd2-6768-4289-a189-74ae9c7b08de",
  1283. "metadata": {
  1284. "tags": []
  1285. },
  1286. "outputs": [],
  1287. "source": [
  1288. "for param_name, weights in model.named_parameters():\n",
  1289. " if weights.requires_grad:\n",
  1290. " print(param_name)"
  1291. ]
  1292. },
  1293. {
  1294. "cell_type": "code",
  1295. "execution_count": null,
  1296. "id": "a009912e-7366-4deb-aae0-e78331b7e160",
  1297. "metadata": {
  1298. "tags": []
  1299. },
  1300. "outputs": [],
  1301. "source": [
  1302. "new_w = peft_module.get_weights()\n",
  1303. "new_w\n",
  1304. "(new_w - old_w).sum()"
  1305. ]
  1306. },
  1307. {
  1308. "cell_type": "code",
  1309. "execution_count": null,
  1310. "id": "98004c44-7a88-4f58-b956-d2ce6c3c56a8",
  1311. "metadata": {
  1312. "tags": []
  1313. },
  1314. "outputs": [],
  1315. "source": [
  1316. "new_w"
  1317. ]
  1318. },
  1319. {
  1320. "cell_type": "code",
  1321. "execution_count": null,
  1322. "id": "4301e6f1-8212-4a67-b93d-589717098b15",
  1323. "metadata": {
  1324. "tags": []
  1325. },
  1326. "outputs": [],
  1327. "source": [
  1328. "new_model = BertForSequenceClassification.from_pretrained(MODEL_NAME)\n",
  1329. "new_model.to(DEVICE)\n",
  1330. "nm_sd = new_model.state_dict()"
  1331. ]
  1332. },
  1333. {
  1334. "cell_type": "code",
  1335. "execution_count": null,
  1336. "id": "4c001d2d-e862-4926-8a31-e218489654e1",
  1337. "metadata": {
  1338. "tags": []
  1339. },
  1340. "outputs": [],
  1341. "source": [
  1342. "old_sd = model.state_dict()"
  1343. ]
  1344. },
  1345. {
  1346. "cell_type": "code",
  1347. "execution_count": null,
  1348. "id": "db972023-2457-40fa-8b24-c9df81e9cb51",
  1349. "metadata": {
  1350. "tags": []
  1351. },
  1352. "outputs": [],
  1353. "source": [
  1354. "for key, val in old_sd.items():\n",
  1355. " if key not in nm_sd:\n",
  1356. " print(key)"
  1357. ]
  1358. },
  1359. {
  1360. "cell_type": "code",
  1361. "execution_count": null,
  1362. "id": "0f3ba7b5-441e-411d-a848-f907ec27496e",
  1363. "metadata": {
  1364. "tags": []
  1365. },
  1366. "outputs": [],
  1367. "source": [
  1368. "model.bert.embeddings.word_embeddings.sadcl_soft_prompt.learned_embedding.requires_grad"
  1369. ]
  1370. },
  1371. {
  1372. "cell_type": "code",
  1373. "execution_count": null,
  1374. "id": "10c1e258-df79-46ce-bc99-cbd024948379",
  1375. "metadata": {
  1376. "tags": []
  1377. },
  1378. "outputs": [],
  1379. "source": [
  1380. "(old_sd['bert.embeddings.word_embeddings.emb_layer.weight'] - nm_sd['bert.embeddings.word_embeddings.weight']).sum()"
  1381. ]
  1382. },
  1411. {
  1412. "cell_type": "code",
  1413. "execution_count": null,
  1414. "id": "61c894cd-d601-43e0-b7b1-c3c67b5cd1f7",
  1415. "metadata": {
  1416. "tags": []
  1417. },
  1418. "outputs": [],
  1419. "source": [
  1420. "a = \"hi bye\"\n",
  1421. "model.eval()\n",
  1422. "with torch.no_grad():\n",
  1423. " tokens = tokenizer(a, return_tensors='pt').to(DEVICE)\n",
  1424. " o = model.bert.embeddings.word_embeddings(peft_module.post_tokenizer(input_ids=tokens['input_ids'])['input_ids'])"
  1425. ]
  1426. },
  1427. {
  1428. "cell_type": "code",
  1429. "execution_count": null,
  1430. "id": "e1dad24f",
  1431. "metadata": {
  1432. "tags": []
  1433. },
  1434. "outputs": [
  1435. {
  1436. "data": {
  1437. "text/plain": [
  1438. "torch.Size([1, 10, 768])"
  1439. ]
  1440. },
  1441. "execution_count": 23,
  1442. "metadata": {},
  1443. "output_type": "execute_result"
  1444. }
  1445. ],
  1446. "source": [
  1447. "o.shape"
  1448. ]
  1449. },
  1450. {
  1451. "cell_type": "code",
  1452. "execution_count": null,
  1453. "id": "632a6a0b-e2e3-4d3b-9d12-021f08e39b6e",
  1454. "metadata": {
  1455. "tags": []
  1456. },
  1457. "outputs": [],
  1458. "source": [
  1459. "o"
  1460. ]
  1461. },
  1462. {
  1463. "cell_type": "code",
  1464. "execution_count": null,
  1465. "id": "811ee37b-61fb-4103-8900-64a5ffd2e7c7",
  1466. "metadata": {
  1467. "tags": []
  1468. },
  1469. "outputs": [],
  1470. "source": [
  1471. "len(out.hidden_states)"
  1472. ]
  1473. },
  1474. {
  1475. "cell_type": "code",
  1476. "execution_count": null,
  1477. "id": "3d768fa9-2d1d-4598-8fb1-3681a8f53897",
  1478. "metadata": {
  1479. "tags": []
  1480. },
  1481. "outputs": [],
  1482. "source": [
  1483. "setattr(c, 'a', 3)"
  1484. ]
  1485. },
  1486. {
  1487. "cell_type": "code",
  1488. "execution_count": null,
  1489. "id": "3293b793-32b7-4cee-986b-1286604e361b",
  1490. "metadata": {
  1491. "tags": []
  1492. },
  1493. "outputs": [],
  1494. "source": [
  1495. "class c:\n",
  1496. " def a(self, i):\n",
  1497. " print(i + 1)\n",
  1498. " \n",
  1499. " def b(self):\n",
  1500. " c = self.a\n",
  1501. " self.a = lambda i: c(i + 1)\n",
  1502. "\n",
  1503. "o = c()\n",
  1504. "o.a(3)\n",
  1505. "o.b()\n",
  1506. "o.a(3)"
  1507. ]
  1508. },
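{
"cell_type": "markdown",
"id": "wrap-trick-note",
"metadata": {},
"source": [
"The scratch cell above rehearses the method-wrapping trick used in `NewEmbeddingLayer.mutate`: the original bound method is captured in a closure and replaced by a wrapper, so subsequent calls go through the new behavior first."
]
},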
  1509. {
  1510. "cell_type": "code",
  1511. "execution_count": null,
  1512. "id": "41adc585-a890-4971-95b9-df994adaada2",
  1513. "metadata": {},
  1514. "outputs": [],
  1515. "source": []
  1516. }
  1517. ],
  1518. "metadata": {
  1519. "kernelspec": {
  1520. "display_name": "Python [conda env:deep]",
  1521. "language": "python",
  1522. "name": "conda-env-deep-py"
  1523. },
  1524. "language_info": {
  1525. "codemirror_mode": {
  1526. "name": "ipython",
  1527. "version": 3
  1528. },
  1529. "file_extension": ".py",
  1530. "mimetype": "text/x-python",
  1531. "name": "python",
  1532. "nbconvert_exporter": "python",
  1533. "pygments_lexer": "ipython3",
  1534. "version": "3.10.11"
  1535. }
  1536. },
  1537. "nbformat": 4,
  1538. "nbformat_minor": 5
  1539. }