{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"Megatron-DeepSpeed","owner":"SysML-project","isFork":true,"description":"Ongoing research training transformer language models at scale, including: BERT & GPT-2","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":1993,"license":"Other","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-03T20:27:00.642Z"}},{"type":"Public","name":"dlrm_prof","owner":"SysML-project","isFork":true,"description":"An implementation of a deep learning recommendation model (DLRM)","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":815,"license":"MIT License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2022-11-30T19:46:02.350Z"}}],"repositoryCount":2,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}