{"payload":{"header_redesign_enabled":false,"results":[{"id":"466635976","archived":false,"color":"#3572A5","followers":8,"has_funding_file":false,"hl_name":"sangmichaelxie/pretraining_analysis","hl_trunc_description":"Code for the NeurIPS 2021 paper \"Why Do Pretrained Language Models Help in Downstream Tasks? An Analysis of Head and Prompt Tuning\"","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":466635976,"name":"pretraining_analysis","owner_id":7904586,"owner_login":"sangmichaelxie","updated_at":"2022-03-06T08:30:07.645Z","has_issues":true}},"sponsorable":false,"topics":[],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":75,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Asangmichaelxie%252Fpretraining_analysis%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/sangmichaelxie/pretraining_analysis/star":{"post":"DZRfSxgGX_c9VFi0WrRAvkIsIrwYe2pm_hbo1vmCtFYRfLdy5WYwtsKhnUdapzvciAykFpPlnBGgcwRt3BMGLA"},"/sangmichaelxie/pretraining_analysis/unstar":{"post":"wUKJpVrRsZU67kN3bVZaHlXKbJryasT3Og_GTWuT9HC0z2j3sFmr2K4fjyHoKe0Fob4DBETjBiF_Lnkz80yPeA"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"wFVyQKixcm6f7uaAAtxOf8-pxDEi3Dk-4jxqiNtCtJHk9rUaTS_C_gQRe39DOuMb7L7-AyzXCgNDLRnnkt7_oQ"}}},"title":"Repository search results"}