{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"lmdeploy","owner":"InternLM","isFork":false,"description":"LMDeploy is a toolkit for compressing, deploying, and serving LLMs.","topicNames":["llama","cuda-kernels","deepspeed","llm","fastertransformer","llm-inference","turbomind","internlm","llama2","codellama"],"topicsNotShown":1,"allTopics":["llama","cuda-kernels","deepspeed","llm","fastertransformer","llm-inference","turbomind","internlm","llama2","codellama","llama3"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":24,"issueCount":107,"starsCount":2636,"forksCount":240,"license":"Apache License 2.0","participation":[0,0,0,13,12,41,10,21,17,9,7,16,15,7,10,9,7,10,0,8,10,7,2,14,7,10,11,11,11,17,11,7,22,26,19,26,16,1,16,16,30,17,18,14,12,13,14,22,10,13,11,10],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T09:22:25.497Z"}},{"type":"Public","name":"InternEvo","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":15,"issueCount":28,"starsCount":177,"forksCount":30,"license":"Apache License 2.0","participation":[0,0,0,0,0,17,13,10,15,12,9,10,22,10,25,12,18,17,0,7,10,5,6,16,10,5,5,9,6,4,0,3,1,2,6,11,2,0,3,16,5,9,9,19,20,20,3,0,0,0,4,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T09:12:55.972Z"}},{"type":"Public","name":"agentlego","owner":"InternLM","isFork":false,"description":"Enhance LLM agents with versatile tool APIs","topicNames":["large-language-models","llm","llm-agents"],"topicsNotShown":0,"allTopics":["large-language-models","llm","llm-agents"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":2,"starsCount":220,"forksCount":21,"license":"Apache License 2.0","participation":[4,14,2,1,0,1,1,1,2,8,0,1,1,4,2,7,4,4,0,5,0,0,0,15,7,0,0,6,12,2,0,0,0,0,0,0,8,0,1,1,3,3,0,0,0,0,1,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T13:46:56.847Z"}},{"type":"Public","name":"HuixiangDou","owner":"InternLM","isFork":false,"description":"HuixiangDou: Overcoming Group Chat Scenarios with LLM-based Technical Assistance","topicNames":["application","ocr","robot","pipeline","dsl","chatbot","wechat","assistance","lark","multimodal"],"topicsNotShown":2,"allTopics":["application","ocr","robot","pipeline","dsl","chatbot","wechat","assistance","lark","multimodal","rag","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":14,"starsCount":937,"forksCount":82,"license":"BSD 3-Clause \"New\" or \"Revised\" License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,40,17,30,44,2,36,187,59,66,67,45,57,49,13,5,3,7,4,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T12:04:48.178Z"}},{"type":"Public","name":"xtuner","owner":"InternLM","isFork":false,"description":"An efficient, flexible and full-featured toolkit for fine-tuning large models (InternLM2, Llama3, Phi3, Qwen, Mistral, 
...)","topicNames":["agent","chatbot","conversational-ai","peft","baichuan","msagent","large-language-models","llm","supervised-finetuning","llava"],"topicsNotShown":9,"allTopics":["agent","chatbot","conversational-ai","peft","baichuan","msagent","large-language-models","llm","supervised-finetuning","llava","llm-training","chatglm2","internlm","llama2","qwen","chatglm3","mixtral","llama3","phi3"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":27,"issueCount":71,"starsCount":2824,"forksCount":221,"license":"Apache License 2.0","participation":[0,0,0,0,0,1,6,3,7,4,4,13,11,30,9,4,4,3,0,3,0,5,4,3,9,6,2,2,4,3,8,1,2,16,8,14,8,0,1,8,2,5,8,7,10,1,9,17,5,1,10,3],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T08:12:59.282Z"}},{"type":"Public","name":"Tutorial","owner":"InternLM","isFork":false,"description":"LLM Tutorial","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":17,"issueCount":29,"starsCount":767,"forksCount":166,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,9,12,6,42,15,8,1,1,0,0,1,0,18,0,0,0,0,1,1,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T06:51:47.403Z"}},{"type":"Public","name":"lagent","owner":"InternLM","isFork":false,"description":"A lightweight framework for building LLM-based agents","topicNames":["agent","transformers","gpt","llm"],"topicsNotShown":0,"allTopics":["agent","transformers","gpt","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":8,"issueCount":9,"starsCount":876,"forksCount":94,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,19,0,0,2,4,1,0,0,3,6,6,0,2,0,2,0,0,0,0,1,0,1,1,32,6,0,4,2,1,0,3,1,0,1,2,0,1,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-11T06:58:55.828Z"}},{"type":"Public","name":"InternLM-XComposer","owner":"InternLM","isFork":false,"description":"InternLM-XComposer2 is a groundbreaking vision-language large model (VLLM) excelling in free-form text-image composition and comprehension. ","topicNames":["foundation","gpt","language-model","multimodal","multi-modality","vision-transformer","gpt-4","visual-language-learning","llm","chatgpt"],"topicsNotShown":6,"allTopics":["foundation","gpt","language-model","multimodal","multi-modality","vision-transformer","gpt-4","visual-language-learning","llm","chatgpt","instruction-tuning","large-language-model","supervised-finetuning","mllm","vision-language-model","large-vision-language-model"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":10,"issueCount":85,"starsCount":1759,"forksCount":118,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,42,6,27,25,1,5,0,0,44,3,0,13,2,3,3,1,0,4,75,17,0,5,3,7,0,4,0,0,6,0,11,3,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-08T18:32:58.760Z"}},{"type":"Public","name":"InternLM","owner":"InternLM","isFork":false,"description":"Official release of InternLM2 7B and 20B base and chat models. 
200K context support","topicNames":["chatbot","chinese","gpt","pretrained-models","llm","long-context","rlhf","large-language-model","flash-attention","fine-tuning-llm"],"topicsNotShown":0,"allTopics":["chatbot","chinese","gpt","pretrained-models","llm","long-context","rlhf","large-language-model","flash-attention","fine-tuning-llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":5307,"forksCount":379,"license":"Apache License 2.0","participation":[0,0,0,0,0,17,13,10,15,12,9,10,22,10,25,10,6,0,0,0,0,3,2,0,0,0,0,0,1,1,2,1,0,21,15,3,0,0,2,2,1,1,0,0,1,2,0,1,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-22T09:03:47.313Z"}},{"type":"Public","name":"OpenAOE","owner":"InternLM","isFork":false,"description":"LLM Group Chat Framework: chat with multiple LLMs at the same time. 大模型群聊框架:同时与多个大语言模型聊天。","topicNames":["chat-application","llms"],"topicsNotShown":0,"allTopics":["chat-application","llms"],"primaryLanguage":{"name":"TypeScript","color":"#3178c6"},"pullRequestCount":1,"issueCount":3,"starsCount":170,"forksCount":11,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,174,33,26,14,0,0,3,12,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-10T06:19:43.505Z"}},{"type":"Public","name":"Agent-FLAN","owner":"InternLM","isFork":false,"description":"[ACL2024 Findings] Agent-FLAN: Designing Data and Methods of Effective Agent Tuning for Large Language Models","topicNames":["agent","chatbot","gpt","llm","large-language-model","fine-tuning-llm"],"topicsNotShown":0,"allTopics":["agent","chatbot","gpt","llm","large-language-model","fine-tuning-llm"],"primaryLanguage":null,"pullRequestCount":1,"issueCount":10,"starsCount":211,"forksCount":7,"license":"Apache License 2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,14,1,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-22T15:12:14.194Z"}},{"type":"Public","name":"AcmeTrace","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Jupyter Notebook","color":"#DA5B0B"},"pullRequestCount":0,"issueCount":1,"starsCount":49,"forksCount":4,"license":"Creative Commons Attribution 4.0 International","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-03-12T11:49:56.739Z"}},{"type":"Public","name":"InternLM-Math","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":197,"forksCount":12,"license":"Apache License 
2.0","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,28,16,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-14T04:51:26.369Z"}},{"type":"Public","name":".github","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":1,"license":null,"participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-23T15:46:17.549Z"}},{"type":"Public","name":"InternEvo_bak","owner":"InternLM","isFork":true,"description":"InternEvo is a high-performance training system for giant models.","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":9,"issueCount":0,"starsCount":1,"forksCount":379,"license":"Apache License 2.0","participation":[0,0,0,0,0,17,13,10,15,12,9,10,22,10,25,10,6,0,0,0,0,3,2,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-17T08:43:38.608Z"}},{"type":"Public","name":"pytorch_sphinx_theme","owner":"InternLM","isFork":true,"description":"PyTorch Sphinx Theme","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"CSS","color":"#563d7c"},"pullRequestCount":0,"issueCount":0,"starsCount":0,"forksCount":128,"license":"MIT License","participation":[0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-03T05:14:34.765Z"}},{"type":"Public","name":"InternLMWeb","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"HTML","color":"#e34c26"},"pullRequestCount":0,"issueCount":0,"starsCount":12,"forksCount":7,"license":null,"participation":[9,13,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-07-12T12:17:27.099Z"}},{"type":"Public","name":"InternLM-techreport","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":null,"pullRequestCount":0,"issueCount":6,"starsCount":868,"forksCount":24,"license":null,"participation":[0,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2023-06-07T04:52:53.635Z"}}],"repositoryCount":18,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}