{"payload":{"pageCount":1,"repositories":[{"type":"Public","name":"HuixiangDou","owner":"InternLM","isFork":false,"description":"HuixiangDou: Overcoming Group Chat Scenarios with LLM-based Technical Assistance","topicNames":["application","ocr","robot","pipeline","dsl","chatbot","wechat","assistance","lark","multimodal"],"topicsNotShown":2,"allTopics":["application","ocr","robot","pipeline","dsl","chatbot","wechat","assistance","lark","multimodal","rag","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":14,"starsCount":937,"forksCount":82,"license":"BSD 3-Clause \"New\" or \"Revised\" License","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T12:59:38.784Z"}},{"type":"Public","name":"lmdeploy","owner":"InternLM","isFork":false,"description":"LMDeploy is a toolkit for compressing, deploying, and serving LLMs.","topicNames":["llama","cuda-kernels","deepspeed","llm","fastertransformer","llm-inference","turbomind","internlm","llama2","codellama"],"topicsNotShown":1,"allTopics":["llama","cuda-kernels","deepspeed","llm","fastertransformer","llm-inference","turbomind","internlm","llama2","codellama","llama3"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":21,"issueCount":106,"starsCount":2636,"forksCount":240,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T13:01:46.371Z"}},{"type":"Public","name":"InternEvo","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":15,"issueCount":28,"starsCount":177,"forksCount":30,"license":"Apache License 2.0","participation":[0,0,0,0,0,17,13,10,15,12,9,10,22,10,25,12,18,17,0,7,10,5,6,16,10,5,5,9,6,4,0,3,1,2,6,11,2,0,3,16,5,9,9,19,20,20,3,0,0,0,4,7],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-24T09:12:55.972Z"}},{"type":"Public","name":"agentlego","owner":"InternLM","isFork":false,"description":"Enhance LLM agents with versatile tool APIs","topicNames":["large-language-models","llm","llm-agents"],"topicsNotShown":0,"allTopics":["large-language-models","llm","llm-agents"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":1,"issueCount":2,"starsCount":220,"forksCount":22,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T13:46:56.847Z"}},{"type":"Public","name":"xtuner","owner":"InternLM","isFork":false,"description":"An efficient, flexible and full-featured toolkit for fine-tuning large models (InternLM2, Llama3, Phi3, Qwen, Mistral, ...)","topicNames":["agent","chatbot","conversational-ai","peft","baichuan","msagent","large-language-models","llm","supervised-finetuning","llava"],"topicsNotShown":9,"allTopics":["agent","chatbot","conversational-ai","peft","baichuan","msagent","large-language-models","llm","supervised-finetuning","llava","llm-training","chatglm2","internlm","llama2","qwen","chatglm3","mixtral","llama3","phi3"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":27,"issueCount":70,"starsCount":2824,"forksCount":221,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T08:12:59.282Z"}},{"type":"Public","name":"Tutorial","owner":"InternLM","isFork":false,"description":"LLM 
Tutorial","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":17,"issueCount":29,"starsCount":767,"forksCount":166,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-23T06:51:47.403Z"}},{"type":"Public","name":"lagent","owner":"InternLM","isFork":false,"description":"A lightweight framework for building LLM-based agents","topicNames":["agent","transformers","gpt","llm"],"topicsNotShown":0,"allTopics":["agent","transformers","gpt","llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":8,"issueCount":9,"starsCount":876,"forksCount":94,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-11T06:58:55.828Z"}},{"type":"Public","name":"InternLM-XComposer","owner":"InternLM","isFork":false,"description":"InternLM-XComposer2 is a groundbreaking vision-language large model (VLLM) excelling in free-form text-image composition and comprehension. ","topicNames":["foundation","gpt","language-model","multimodal","multi-modality","vision-transformer","gpt-4","visual-language-learning","llm","chatgpt"],"topicsNotShown":6,"allTopics":["foundation","gpt","language-model","multimodal","multi-modality","vision-transformer","gpt-4","visual-language-learning","llm","chatgpt","instruction-tuning","large-language-model","supervised-finetuning","mllm","vision-language-model","large-vision-language-model"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":10,"issueCount":86,"starsCount":1759,"forksCount":118,"license":null,"participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-05-08T18:32:58.760Z"}},{"type":"Public","name":"InternLM","owner":"InternLM","isFork":false,"description":"Official release of InternLM2 7B and 20B base and chat models. 
200K context support","topicNames":["chatbot","chinese","gpt","pretrained-models","llm","long-context","rlhf","large-language-model","flash-attention","fine-tuning-llm"],"topicsNotShown":0,"allTopics":["chatbot","chinese","gpt","pretrained-models","llm","long-context","rlhf","large-language-model","flash-attention","fine-tuning-llm"],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":5308,"forksCount":379,"license":"Apache License 2.0","participation":[0,0,0,0,0,17,13,10,15,12,9,10,22,10,25,10,6,0,0,0,0,3,2,0,0,0,0,0,1,1,2,1,0,21,15,3,0,0,2,2,1,1,0,0,1,2,0,1,0,0,0,0],"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-04-22T09:03:47.313Z"}},{"type":"Public","name":"InternLM-Math","owner":"InternLM","isFork":false,"description":"","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":0,"issueCount":3,"starsCount":197,"forksCount":12,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-02-14T04:51:26.369Z"}},{"type":"Public","name":"InternEvo_bak","owner":"InternLM","isFork":true,"description":"InternEvo is a high-performance training system for giant models.","topicNames":[],"topicsNotShown":0,"allTopics":[],"primaryLanguage":{"name":"Python","color":"#3572A5"},"pullRequestCount":9,"issueCount":0,"starsCount":1,"forksCount":379,"license":"Apache License 2.0","participation":null,"lastUpdated":{"hasBeenPushedTo":true,"timestamp":"2024-01-17T08:43:38.608Z"}}],"repositoryCount":11,"userInfo":null,"searchable":true,"definitions":[],"typeFilters":[{"id":"all","text":"All"},{"id":"public","text":"Public"},{"id":"source","text":"Sources"},{"id":"fork","text":"Forks"},{"id":"archived","text":"Archived"},{"id":"template","text":"Templates"}],"compactMode":false},"title":"Repositories"}
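
For the lmdeploy entry, here is a minimal sketch of offline inference with LMDeploy's Python `pipeline` API, assuming `pip install lmdeploy` has been run; the model id internlm/internlm2-chat-1_8b and the batched-prompt call reflect a typical quickstart and are assumptions, not an excerpt from the repository.

```python
# Minimal sketch: offline batched inference with LMDeploy's pipeline API.
# Assumes lmdeploy is installed and that the internlm/internlm2-chat-1_8b
# weights can be fetched from the Hugging Face Hub (model id is an assumption).
from lmdeploy import pipeline

# Build an inference pipeline backed by LMDeploy's serving engine.
pipe = pipeline('internlm/internlm2-chat-1_8b')

# Run a batch of prompts; each element of the result holds one generation.
responses = pipe(['Hi, please introduce yourself.', 'Shanghai is'])
print(responses)
```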
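
For the InternLM entry, the following is a minimal sketch of loading an InternLM2 chat model through Hugging Face transformers; the model id, dtype, and the `chat()` helper provided by the model's remote code are assumptions based on typical usage rather than guaranteed API.

```python
# Minimal sketch: chatting with an InternLM2 model via Hugging Face transformers.
# trust_remote_code=True is needed because InternLM2 ships custom modeling code;
# the model id and the chat() helper are assumptions based on typical usage.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "internlm/internlm2-chat-7b"
tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.float16,
    trust_remote_code=True,
).cuda().eval()

# chat() returns the reply plus the updated conversation history.
response, history = model.chat(tokenizer, "Hello! Please introduce yourself.", history=[])
print(response)
```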