{"payload":{"header_redesign_enabled":false,"results":[{"id":"197785701","archived":false,"color":"#3572A5","followers":872,"has_funding_file":false,"hl_name":"davidmrau/mixture-of-experts","hl_trunc_description":"PyTorch Re-Implementation of \"The Sparsely-Gated Mixture-of-Experts Layer\" by Noam Shazeer et al. https://arxiv.org/abs/1701.06538","language":"Python","mirror":false,"owned_by_organization":false,"public":true,"repo":{"repository":{"id":197785701,"name":"mixture-of-experts","owner_id":20661461,"owner_login":"davidmrau","updated_at":"2024-04-19T08:22:39.825Z","has_issues":true}},"sponsorable":false,"topics":["pytorch","moe","re-implementation","mixture-of-experts","sparsely-gated-mixture-of-experts"],"type":"Public","help_wanted_issues_count":0,"good_first_issue_issues_count":0,"starred_by_current_user":false}],"type":"repositories","page":1,"page_count":1,"elapsed_millis":61,"errors":[],"result_count":1,"facets":[],"protected_org_logins":[],"topics":null,"query_id":"","logged_in":false,"sign_up_path":"/signup?source=code_search_results","sign_in_path":"/login?return_to=https%3A%2F%2Fgithub.com%2Fsearch%3Fq%3Drepo%253Adavidmrau%252Fmixture-of-experts%2B%2Blanguage%253APython","metadata":null,"csrf_tokens":{"/davidmrau/mixture-of-experts/star":{"post":"jADK4uWQCQp_HH5nyeomM0xYVmyxAny22AsHynzDDesxgH_OLBcFTwZsRxRW5AGbta6gKF01NUju5XVXQgIblQ"},"/davidmrau/mixture-of-experts/unstar":{"post":"Q7NtYbTpyPKj_NyaUk0FOCCtSckJYIQIhYovZietzmKYRJFSt595UPY04Mho-y1WZVSQtPFMDDvKDb60xTKdsQ"},"/sponsors/batch_deferred_sponsor_buttons":{"post":"bcM1jfD1l_yDSI3CrAf5_jbfu62dQf2h4lSgLToqlwwtYn1tKkTlYChygGMpMgT9eN6uMLbs1stcT1OOBWhyjQ"}}},"title":"Repository search results"}