From f686afd4890e70bbcf62baa3e6ce86e0ba423a48 Mon Sep 17 00:00:00 2001
From: Andrew Brain
Date: Sat, 3 Feb 2024 07:10:21 -0600
Subject: [PATCH 01/76] Add augur api start

Signed-off-by: Andrew Brain
---
 augur/application/cli/_cli_util.py |  69 +++++++++++++
 augur/application/cli/api.py       | 157 +++++++++++++++++++++++++++++
 augur/application/cli/backend.py   |   1 -
 3 files changed, 226 insertions(+), 1 deletion(-)
 create mode 100644 augur/application/cli/_cli_util.py
 create mode 100644 augur/application/cli/api.py

diff --git a/augur/application/cli/_cli_util.py b/augur/application/cli/_cli_util.py
new file mode 100644
index 0000000000..d5837f9f90
--- /dev/null
+++ b/augur/application/cli/_cli_util.py
@@ -0,0 +1,69 @@
+import resource
+import os
+import subprocess
+import psutil
+import signal
+from urllib.parse import urlparse
+
+from augur.tasks.init.redis_connection import redis_connection
+
+def clear_redis_caches(logger):
+    """Clears the Redis databases that Celery and Augur use."""
+
+    logger.info("Flushing all redis databases this instance was using")
+    celery_purge_command = "celery -A augur.tasks.init.celery_app.celery_app purge -f"
+    subprocess.call(celery_purge_command.split(" "))
+    redis_connection.flushdb()
+
+
+def clear_rabbitmq_messages(connection_string, queues, logger):
+    #virtual_host_string = connection_string.split("/")[-1]
+
+    logger.info("Clearing all messages from celery queue in rabbitmq")
+    from augur.tasks.init.celery_app import celery_app
+    celery_app.control.purge()
+
+    clear_message_queues(connection_string, queues)
+
+
+def clear_message_queues(connection_string, queues):
+    # default queue set, used only when the caller does not pass its own
+    queues = queues or ['celery','secondary','scheduling','facade']
+
+    virtual_host_string = connection_string.split("/")[-1]
+
+    #Parse username and password with urllib
+    parsed = urlparse(connection_string)
+
+    for q in queues:
+        curl_cmd = f"curl -i -u {parsed.username}:{parsed.password} -XDELETE http://localhost:15672/api/queues/{virtual_host_string}/{q}"
+        subprocess.call(curl_cmd.split(" "),stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+
+def _broadcast_signal_to_processes(processes, logger, broadcast_signal=signal.SIGTERM):
+
+    for process in processes:
+        if process.pid != os.getpid():
+            logger.info(f"Stopping process {process.pid}")
+            try:
+                process.send_signal(broadcast_signal)
+            except psutil.NoSuchProcess:
+                pass
+
+
+def raise_open_file_limit(num_files):
+    """
+    Sets the soft limit on the number of open files.
+    """
+    current_soft, current_hard = resource.getrlimit(resource.RLIMIT_NOFILE)
+
+    # if the soft limit is already greater than the requested amount then don't change it
+    if current_soft > num_files:
+        return
+
+    # if the requested amount is greater than the hard limit then raise the hard limit to num_files
+    if current_hard <= num_files:
+        current_hard = num_files
+
+    resource.setrlimit(resource.RLIMIT_NOFILE, (num_files, current_hard))
+
+    return
\ No newline at end of file
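
The soft/hard limit interplay in raise_open_file_limit is easy to misread, so here is a standalone sketch of the same decision logic (illustrative only, not part of the patch; it prints what would happen rather than changing any limits):

    import resource

    requested = 100000
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)

    if soft > requested:
        print(f"soft limit {soft} already exceeds {requested}; nothing to do")
    else:
        # the hard limit must be at least as large as the new soft limit
        new_hard = max(hard, requested)
        print(f"would call resource.setrlimit(RLIMIT_NOFILE, ({requested}, {new_hard}))")
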
diff --git a/augur/application/cli/api.py b/augur/application/cli/api.py
new file mode 100644
index 0000000000..7a954c688b
--- /dev/null
+++ b/augur/application/cli/api.py
@@ -0,0 +1,157 @@
+#SPDX-License-Identifier: MIT
+"""
+Augur library commands for controlling the backend components
+"""
+import os
+import time
+import subprocess
+import click
+import logging
+import psutil
+import signal
+import uuid
+import traceback
+
+from augur.application.db.session import DatabaseSession
+from augur.application.logs import AugurLogger
+from augur.application.config import AugurConfig
+from augur.application.cli import test_connection, test_db_connection
+from augur.application.cli._cli_util import _broadcast_signal_to_processes, raise_open_file_limit, clear_redis_caches, clear_rabbitmq_messages
+
+logger = AugurLogger("augur", reset_logfiles=True).get_logger()
+
+@click.group('api', short_help='Commands for controlling the backend API server')
+def cli():
+    pass
+
+@cli.command("start")
+@click.option("--development", is_flag=True, default=False, help="Enable development mode")
+@click.option('--port')
+@test_connection
+@test_db_connection
+def start(development, port):
+    """Start Augur's backend server."""
+
+    try:
+        if os.environ.get('AUGUR_DOCKER_DEPLOY') != "1":
+            raise_open_file_limit(100000)
+    except Exception as e:
+        logger.error(
+            ''.join(traceback.format_exception(None, e, e.__traceback__)))
+
+        logger.error("Failed to raise open file limit!")
+        raise e
+
+    if development:
+        os.environ["AUGUR_DEV"] = "1"
+        logger.info("Starting in development mode")
+
+    try:
+        gunicorn_location = os.getcwd() + "/augur/api/gunicorn_conf.py"
+    except FileNotFoundError:
+        logger.error("\n\nPlease run augur commands in the root directory\n\n")
+
+    with DatabaseSession(logger) as db_session:
+        config = AugurConfig(logger, db_session)
+        host = config.get_value("Server", "host")
+
+        if not port:
+            port = config.get_value("Server", "port")
+
+    gunicorn_command = f"gunicorn -c {gunicorn_location} -b {host}:{port} augur.api.server:app --log-file gunicorn.log"
+    server = subprocess.Popen(gunicorn_command.split(" "))
+
+    time.sleep(3)
+    logger.info('Gunicorn webserver started...')
+    logger.info(f'Augur is running at: {"http" if development else "https"}://{host}:{port}')
+
+    frontend_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=1 -n frontend:{uuid.uuid4().hex}@%h -Q frontend"
+    frontend_worker_process = subprocess.Popen(frontend_worker.split(" "))
+
+    try:
+        server.wait()
+    except KeyboardInterrupt:
+
+        if server:
+            logger.info("Shutting down server")
+            server.terminate()
+
+        logger.info("Shutting down frontend celery worker process")
+        if frontend_worker_process:
+            frontend_worker_process.terminate()
+
+@cli.command('stop')
+def stop():
+    """
+    Sends SIGTERM to all Augur API processes
+    """
+    logger = logging.getLogger("augur.cli")
+
+    augur_stop(signal.SIGTERM, logger)
+
+@cli.command('kill')
+def kill():
+    """
+    Sends SIGKILL to all Augur API processes
+    """
+    logger = logging.getLogger("augur.cli")
+    augur_stop(signal.SIGKILL, logger)
+
+@cli.command('processes')
+def processes():
+    """
+    Outputs the name/PID of all Augur API processes"""
+    augur_processes = get_augur_api_processes()
+    for process in augur_processes:
+        logger.info(f"Found process {process.pid}")
+
+def augur_stop(signal, logger):
+    """
+    Stops Augur's API processes with the given signal,
+    then cleans up the API's message queues and caches
+    """
+
+    augur_processes = get_augur_api_processes()
+
+    _broadcast_signal_to_processes(augur_processes, logger=logger, broadcast_signal=signal)
+
+    cleanup_after_api_halt(logger)
+
+
+def cleanup_after_api_halt(logger):
+
+    connection_string = ""
+    queues = ['frontend','celery']
+    with DatabaseSession(logger) as session:
+        config = AugurConfig(logger, session)
+        connection_string = config.get_section("RabbitMQ")['connection_string']
+
+    clear_rabbitmq_messages(connection_string, queues, logger)
+    clear_redis_caches(logger)
+
+def get_augur_api_processes():
+    augur_api_processes = []
+    for process in psutil.process_iter(['cmdline', 'name', 'environ']):
+        if process.info['cmdline'] is not None and process.info['environ'] is not None:
+            try:
+                if is_api_process(process):
+                    augur_api_processes.append(process)
+            except (KeyError, FileNotFoundError):
+                pass
+    return augur_api_processes
+
+def is_api_process(process):
+
+    command = ''.join(process.info['cmdline'][:]).lower()
+    if os.getenv('VIRTUAL_ENV') in process.info['environ']['VIRTUAL_ENV'] and 'python' in command:
+
+        if process.pid != os.getpid():
+
+            if ("augur.api.server:app" in command or
+                "augurbackendapi" in command or
+                ("augur.tasks.init.celery_app.celery_app" in command and "frontend" in command)):
+                return True
+
+    return False
+
diff --git a/augur/application/cli/backend.py b/augur/application/cli/backend.py
index fc466f021c..554ad69b31 100644
--- a/augur/application/cli/backend.py
+++ b/augur/application/cli/backend.py
@@ -18,7 +18,6 @@ from urllib.parse import urlparse
 
 from datetime import datetime
 
-from augur import instance_id
 from augur.tasks.start_tasks import augur_collection_monitor, CollectionState, create_collection_status_records
 from augur.tasks.git.facade_tasks import clone_repos
 from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model
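
One detail worth calling out in is_api_process: it matches against strings like "augurbackendapi" because psutil's cmdline list is joined with no separator before searching. A quick standalone illustration (the argv list here is hypothetical, not taken from a live process):

    # ''.join() collapses the argv list into one unbroken string,
    # so matching must use the concatenated form of the command.
    cmdline = ["augur", "backend", "api"]
    command = ''.join(cmdline).lower()
    print(command)                        # augurbackendapi
    print("augurbackendapi" in command)   # True
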

From 5743e4ff36c0c037d8c5d237172d14b5e370166b Mon Sep 17 00:00:00 2001
From: Andrew Brain
Date: Sat, 3 Feb 2024 07:40:23 -0600
Subject: [PATCH 02/76] Add augur collection start and stop commands

Signed-off-by: Andrew Brain
---
 augur/application/cli/collection.py | 301 ++++++++++++++++++++++++++++
 1 file changed, 301 insertions(+)
 create mode 100644 augur/application/cli/collection.py

diff --git a/augur/application/cli/collection.py b/augur/application/cli/collection.py
new file mode 100644
index 0000000000..d7e1a2ad2f
--- /dev/null
+++ b/augur/application/cli/collection.py
@@ -0,0 +1,301 @@
+#SPDX-License-Identifier: MIT
+"""
+Augur library commands for controlling the data collection components
+"""
+import resource
+import os
+import time
+import subprocess
+import click
+import logging
+import psutil
+import signal
+from redis.exceptions import ConnectionError as RedisConnectionError
+import uuid
+import traceback
+import sqlalchemy as s
+
+from augur.tasks.start_tasks import augur_collection_monitor, create_collection_status_records
+from augur.tasks.git.facade_tasks import clone_repos
+from augur.tasks.data_analysis.contributor_breadth_worker.contributor_breadth_worker import contributor_breadth_model
+from augur.application.db.models import UserRepo
+from augur.application.db.session import DatabaseSession
+from augur.application.logs import AugurLogger
+from augur.application.config import AugurConfig
+from augur.application.cli import test_connection, test_db_connection
+from augur.application.cli._cli_util import _broadcast_signal_to_processes, raise_open_file_limit, clear_redis_caches, clear_rabbitmq_messages
+
+logger = AugurLogger("augur", reset_logfiles=True).get_logger()
+
+@click.group('collection', short_help='Commands for controlling the data collection workers')
+def cli():
+    pass
+
+@cli.command("start")
+@click.option("--development", is_flag=True, default=False, help="Enable development mode")
+@test_connection
+@test_db_connection
+def start(development):
+    """Start Augur's data collection workers."""
+
+    try:
+        if os.environ.get('AUGUR_DOCKER_DEPLOY') != "1":
+            raise_open_file_limit(100000)
+    except Exception as e:
+        logger.error(
+            ''.join(traceback.format_exception(None, e, e.__traceback__)))
+
+        logger.error("Failed to raise open file limit!")
+        raise e
+
+    if development:
+        os.environ["AUGUR_DEV"] = "1"
+        logger.info("Starting in development mode")
+
+    with DatabaseSession(logger) as db_session:
+        config = AugurConfig(logger, db_session)
+
+        worker_vmem_cap = config.get_value("Celery", 'worker_process_vmem_cap')
+
+    processes = start_celery_collection_processes(float(worker_vmem_cap))
+
+    if os.path.exists("celerybeat-schedule.db"):
+        logger.info("Deleting old task schedule")
+        os.remove("celerybeat-schedule.db")
+
+    with DatabaseSession(logger) as db_session:
+        config = AugurConfig(logger, db_session)
+        log_level = config.get_value("Logging", "log_level")
+        celery_beat_process = None
+        celery_command = f"celery -A augur.tasks.init.celery_app.celery_app beat -l {log_level.lower()}"
+        celery_beat_process = subprocess.Popen(celery_command.split(" "))
+
+    with DatabaseSession(logger) as session:
+
+        clean_collection_status(session)
+        assign_orphan_repos_to_default_user(session)
+
+    create_collection_status_records.si().apply_async()
+    time.sleep(3)
+
+    contributor_breadth_model.si().apply_async()
+
+    # start cloning repos when augur starts
+    clone_repos.si().apply_async()
+
+    augur_collection_monitor.si().apply_async()
+
+    try:
+        processes[0].wait()
+    except KeyboardInterrupt:
+
+        logger.info("Shutting down all celery worker processes")
+        for p in processes:
+            if p:
+                p.terminate()
+
+        if celery_beat_process:
+            logger.info("Shutting down celery beat process")
+            celery_beat_process.terminate()
+
+        try:
+            cleanup_after_collection_halt(logger)
+        except RedisConnectionError:
+            pass
+
+def start_celery_collection_processes(vmem_cap_ratio):
+
+    #Calculate process scaling based on how much memory is available on the system in bytes.
+    #Each celery process takes ~500MB or 500 * 1024^2 bytes
+
+    process_list = []
+
+    #Cap memory usage to the configured fraction (vmem_cap_ratio) of total virtual memory
+    available_memory_in_bytes = psutil.virtual_memory().total * vmem_cap_ratio
+    available_memory_in_megabytes = available_memory_in_bytes / (1024 ** 2)
+    max_process_estimate = available_memory_in_megabytes // 500
+    sleep_time = 0
+
+    #Get a subset of the maximum processes available using a ratio, not exceeding a maximum value
+    def determine_worker_processes(ratio,maximum):
+        return max(min(round(max_process_estimate * ratio),maximum),1)
+
+    #2 processes are always reserved as a baseline for the scheduling worker
+    scheduling_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency=2 -n scheduling:{uuid.uuid4().hex}@%h -Q scheduling"
+    max_process_estimate -= 2
+    process_list.append(subprocess.Popen(scheduling_worker.split(" ")))
+    sleep_time += 6
+
+    #60% of estimate, Maximum value of 45
+    core_num_processes = determine_worker_processes(.6, 45)
+    logger.info(f"Starting core worker processes with concurrency={core_num_processes}")
+    core_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency={core_num_processes} -n core:{uuid.uuid4().hex}@%h"
+    process_list.append(subprocess.Popen(core_worker.split(" ")))
+    sleep_time += 6
+
+    #25% of estimate, Maximum value of 25
+    secondary_num_processes = determine_worker_processes(.25, 25)
+    logger.info(f"Starting secondary worker processes with concurrency={secondary_num_processes}")
+    secondary_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency={secondary_num_processes} -n secondary:{uuid.uuid4().hex}@%h -Q secondary"
+    process_list.append(subprocess.Popen(secondary_worker.split(" ")))
+    sleep_time += 6
+
+    #15% of estimate, Maximum value of 20
+    facade_num_processes = determine_worker_processes(.15, 20)
+    logger.info(f"Starting facade worker processes with concurrency={facade_num_processes}")
+    facade_worker = f"celery -A augur.tasks.init.celery_app.celery_app worker -l info --concurrency={facade_num_processes} -n facade:{uuid.uuid4().hex}@%h -Q facade"
+
+    process_list.append(subprocess.Popen(facade_worker.split(" ")))
+    sleep_time += 6
+
+    time.sleep(sleep_time)
+
+    return process_list
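
To make the sizing heuristic above concrete, here is a worked example with illustrative numbers — a 16 GiB host and a cap ratio of 0.5; the real worker_process_vmem_cap value comes from configuration and may differ:

    # Illustrative numbers only: 16 GiB of RAM, worker_process_vmem_cap = 0.5
    total_bytes = 16 * 1024**3
    available_mb = (total_bytes * 0.5) / 1024**2    # 8192 MB usable
    max_process_estimate = available_mb // 500      # 16 processes at ~500 MB each

    def determine_worker_processes(ratio, maximum):
        return max(min(round(max_process_estimate * ratio), maximum), 1)

    max_process_estimate -= 2                       # scheduling worker baseline
    print(determine_worker_processes(.6, 45))       # core workers: 8
    print(determine_worker_processes(.25, 25))      # secondary workers: 4
    print(determine_worker_processes(.15, 20))      # facade workers: 2
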
+
+
+@cli.command('stop')
+def stop():
+    """
+    Sends SIGTERM to all Augur server & worker processes
+    """
+    logger = logging.getLogger("augur.cli")
+
+    augur_stop(signal.SIGTERM, logger)
+
+@cli.command('kill')
+def kill():
+    """
+    Sends SIGKILL to all Augur server & worker processes
+    """
+    logger = logging.getLogger("augur.cli")
+    augur_stop(signal.SIGKILL, logger)
+
+@cli.command('repo-reset')
+@test_connection
+@test_db_connection
+def repo_reset(augur_app):
+    """
+    Refresh repo collection to force data collection
+    """
+    augur_app.database.execute(s.sql.text("""
+        UPDATE augur_operations.collection_status
+        SET core_status='Pending',core_task_id = NULL, core_data_last_collected = NULL;
+
+        UPDATE augur_operations.collection_status
+        SET secondary_status='Pending',secondary_task_id = NULL, secondary_data_last_collected = NULL;
+
+        UPDATE augur_operations.collection_status
+        SET facade_status='Pending', facade_task_id=NULL, facade_data_last_collected = NULL;
+
+        TRUNCATE augur_data.commits CASCADE;
+        """))
+
+    logger.info("Repos successfully reset")
+
+@cli.command('processes')
+def processes():
+    """
+    Outputs the name/PID of all Augur server & worker processes"""
+    augur_processes = get_augur_collection_processes()
+    for process in augur_processes:
+        logger.info(f"Found process {process.pid}")
+
+def get_augur_collection_processes():
+    augur_processes = []
+    for process in psutil.process_iter(['cmdline', 'name', 'environ']):
+        if process.info['cmdline'] is not None and process.info['environ'] is not None:
+            try:
+                if is_collection_process(process):
+                    augur_processes.append(process)
+            except (KeyError, FileNotFoundError):
+                pass
+    return augur_processes
+
+def is_collection_process(process):
+
+    command = ''.join(process.info['cmdline'][:]).lower()
+    if os.getenv('VIRTUAL_ENV') in process.info['environ']['VIRTUAL_ENV'] and 'python' in command:
+        if process.pid != os.getpid():
+
+            if "augurbackendcollection" in command or "celery_app.celery_appbeat" in command:
+                return True
+
+            if "augur.tasks.init.celery_app.celery_app" in command:
+
+                if ("scheduling" in command or
+                    "facade" in command or
+                    "secondary" in command or
+                    "core" in command):
+
+                    return True
+
+    return False
+
+
+def augur_stop(signal, logger):
+    """
+    Stops augur with the given signal,
+    and cleans up collection if it was running
+    """
+
+    augur_collection_processes = get_augur_collection_processes()
+
+    _broadcast_signal_to_processes(augur_collection_processes, logger=logger, broadcast_signal=signal)
+
+    cleanup_after_collection_halt(logger)
+
+def cleanup_after_collection_halt(logger):
+
+    queues = ['celery', 'core', 'secondary','scheduling','facade']
+    connection_string = ""
+    with DatabaseSession(logger) as session:
+        config = AugurConfig(logger, session)
+        connection_string = config.get_section("RabbitMQ")['connection_string']
+
+        clean_collection_status(session)
+
+    clear_rabbitmq_messages(connection_string, queues, logger)
+    clear_redis_caches(logger)
+
+#Make sure that the database reflects collection status when processes are killed/stopped.
+def clean_collection_status(session):
+    session.execute_sql(s.sql.text("""
+        UPDATE augur_operations.collection_status
+        SET core_status='Pending',core_task_id = NULL
+        WHERE core_status='Collecting' AND core_data_last_collected IS NULL;
+
+        UPDATE augur_operations.collection_status
+        SET core_status='Success',core_task_id = NULL
+        WHERE core_status='Collecting' AND core_data_last_collected IS NOT NULL;
+
+        UPDATE augur_operations.collection_status
+        SET secondary_status='Pending',secondary_task_id = NULL
+        WHERE secondary_status='Collecting' AND secondary_data_last_collected IS NULL;
+
+        UPDATE augur_operations.collection_status
+        SET secondary_status='Success',secondary_task_id = NULL
+        WHERE secondary_status='Collecting' AND secondary_data_last_collected IS NOT NULL;
+
+        UPDATE augur_operations.collection_status
+        SET facade_status='Update', facade_task_id=NULL
+        WHERE facade_status LIKE '%Collecting%' and facade_data_last_collected IS NULL;
+
+        UPDATE augur_operations.collection_status
+        SET facade_status='Success', facade_task_id=NULL
+        WHERE facade_status LIKE '%Collecting%' and facade_data_last_collected IS NOT NULL;
+
+        UPDATE augur_operations.collection_status
+        SET facade_status='Pending', facade_task_id=NULL
+        WHERE facade_status='Failed Clone' OR facade_status='Initializing';
+        """))
+    #TODO: write timestamp for currently running repos.
+
+def assign_orphan_repos_to_default_user(session):
+    query = s.sql.text("""
+        SELECT repo_id FROM repo WHERE repo_id NOT IN (SELECT repo_id FROM augur_operations.user_repos)
+    """)
+
+    repos = session.execute_sql(query).fetchall()
+
+    for repo in repos:
+        UserRepo.insert(session,repo[0],1)
\ No newline at end of file

From 13661053787ba3a55ff7ed4f41488711492d55aa Mon Sep 17 00:00:00 2001
From: Andrew Brain
Date: Sat, 17 Feb 2024 18:23:40 -0600
Subject: [PATCH 03/76] Start adding issue contributors

Signed-off-by: Andrew Brain
---
 augur/application/db/data_parse.py | 49 ++++++++++++++++++++++++++++--
 augur/tasks/gitlab/issues_task.py  | 25 ++++++++++++---
 augur/tasks/start_tasks.py         | 18 +++++------
 augur/tasks/util/AugurUUID.py      | 13 ++++++++
 4 files changed, 89 insertions(+), 16 deletions(-)

diff --git a/augur/application/db/data_parse.py b/augur/application/db/data_parse.py
index 7562181398..cab707f282 100644
--- a/augur/application/db/data_parse.py
+++ b/augur/application/db/data_parse.py
@@ -2,7 +2,7 @@
 This file contains functions that take the api response and return only the data that the database needs
 """
 
-from augur.tasks.util.AugurUUID import GithubUUID
+from augur.tasks.util.AugurUUID import GithubUUID, GitlabUUID
 
 import sqlalchemy as s
 from typing import List
@@ -689,6 +689,51 @@ def extract_needed_contributor_data(contributor, tool_source, tool_version, data
 
     return contributor
 
+def extract_needed_gitlab_contributor_data(contributor, tool_source, tool_version, data_source):
+
+    if not contributor:
+        return None
+
+    cntrb_id = GitlabUUID()
+    cntrb_id["user"] = contributor["id"]
+
+    contributor = {
+        "cntrb_id": cntrb_id.to_UUID(),
+        "cntrb_login": contributor['username'],
+        "cntrb_created_at": contributor['created_at'] if 'created_at' in contributor else None,
+        "cntrb_email": contributor['email'] if 'email' in contributor else None,
+        "cntrb_company": contributor['company'] if 'company' in contributor else None,
+        "cntrb_location": contributor['location'] if 'location' in contributor else None,
+        # "cntrb_type": , don't have a use for this as of now ... let it default to null
+        "cntrb_canonical": contributor['email'] if 'email' in contributor else None,
+        "gh_user_id": contributor['id'],
+        "gh_login": str(contributor['username']),  ## cast as string by SPG on 11/28/2021 due to `nan` user
+        "gh_url": contributor['web_url'],
+        "gh_html_url": None,
+        "gh_node_id": None,
+        "gh_avatar_url": contributor['avatar_url'],
+        "gh_gravatar_id": None,
+        "gh_followers_url": None,
+        "gh_following_url": None,
+        "gh_gists_url": None,
+        "gh_starred_url": None,
+        "gh_subscriptions_url": None,
+        "gh_organizations_url": None,
+        "gh_repos_url": None,
+        "gh_events_url": None,
+        "gh_received_events_url": None,
+        "gh_type": None,
+        "gh_site_admin": None,
+        "cntrb_last_used" : None,
+        "cntrb_full_name" : None,
+        "tool_source": tool_source,
+        "tool_version": tool_version,
+        "data_source": data_source
+    }
+
+    return contributor
+
+
 def extract_needed_clone_history_data(clone_history_data:List[dict], repo_id:int):
 
     if len(clone_history_data) == 0:
@@ -811,7 +856,7 @@ def extract_needed_issue_data_from_gitlab_issue(issue: dict, repo_id: int, tool_
     issue_dict = {
         "repo_id": repo_id,
-        "reporter_id": None,
+        "reporter_id": issue['cntrb_id'],
         "pull_request": None,
         "pull_request_id": None,
         "created_at": issue['created_at'],
diff --git a/augur/tasks/gitlab/issues_task.py b/augur/tasks/gitlab/issues_task.py
index cf6e5e5dab..0dded64c85 100644
--- a/augur/tasks/gitlab/issues_task.py
+++ b/augur/tasks/gitlab/issues_task.py
@@ -8,7 +8,7 @@
 from augur.tasks.init.celery_app import AugurCoreRepoCollectionTask
 from augur.tasks.gitlab.gitlab_api_handler import GitlabApiHandler
 from augur.tasks.gitlab.gitlab_task_session import GitlabTaskManifest
-from augur.application.db.data_parse import extract_needed_issue_data_from_gitlab_issue, extract_needed_gitlab_issue_label_data, extract_needed_gitlab_issue_assignee_data, extract_needed_gitlab_issue_message_ref_data, extract_needed_gitlab_message_data
+from augur.application.db.data_parse import extract_needed_issue_data_from_gitlab_issue, extract_needed_gitlab_issue_label_data, extract_needed_gitlab_issue_assignee_data, extract_needed_gitlab_issue_message_ref_data, extract_needed_gitlab_message_data, extract_needed_gitlab_contributor_data
 from augur.tasks.github.util.util import get_owner_repo, add_key_value_pair_to_dicts
 from augur.application.db.models import Issue, IssueLabel, IssueAssignee, IssueMessageRef, Message, Repo
 from augur.application.db.util import execute_session_query
@@ -50,8 +50,6 @@ def collect_gitlab_issues(repo_git : str) -> int:
         logger.error(f"Could not collect gitlab issues for repo {repo_git}\n Reason: {e} \n Traceback: {''.join(traceback.format_exception(None, e, e.__traceback__))}")
         return -1
 
-
-
 def retrieve_all_gitlab_issue_data(repo_git, logger, key_auth) -> None:
     """
     Retrieve only the needed data for issues from the api response
@@ -108,10 +106,15 @@ def process_issues(issues, task_name, repo_id, logger, augur_db) -> None:
     issue_dicts = []
     issue_ids = []
     issue_mapping_data = {}
+    contributors = []
 
     for issue in issues:
         issue_ids.append(issue["iid"])
 
+        issue, contributor_data = process_issue_contributors(issue, tool_source, tool_version, data_source)
+
+        contributors += contributor_data
+
         issue_dicts.append(
             extract_needed_issue_data_from_gitlab_issue(issue, repo_id, tool_source, tool_version, data_source)
         )
@@ -175,7 +178,21 @@ def process_issues(issues, task_name, repo_id, logger, augur_db) -> None:
 
     return issue_ids
 
+def process_issue_contributors(issue, tool_source, tool_version, data_source):
+
+    contributors = []
+    issue_cntrb = extract_needed_gitlab_contributor_data(issue["author"], tool_source, tool_version, data_source)
+    issue["cntrb_id"] = issue_cntrb["cntrb_id"]
+    contributors.append(issue_cntrb)
+
+    # for assignee in issue["assignees"]:
+
+    #     issue_assignee_cntrb = extract_needed_contributor_data(assignee, tool_source, tool_version, data_source)
+    #     assignee["cntrb_id"] = issue_assignee_cntrb["cntrb_id"]
+    #     contributors.append(issue_assignee_cntrb)
+
+    return issue, contributors
 
 @celery.task(base=AugurCoreRepoCollectionTask)
 def collect_gitlab_issue_comments(issue_ids, repo_git) -> int:
@@ -316,5 +333,3 @@ def process_gitlab_issue_messages(data, task_name, repo_id, logger, augur_db):
     logger.info(f"{task_name}: Inserting {len(issue_message_ref_dicts)} gitlab issue messages ref rows")
     issue_message_ref_natural_keys = ["issue_id", "issue_msg_ref_src_comment_id"]
     augur_db.insert_data(issue_message_ref_dicts, IssueMessageRef, issue_message_ref_natural_keys)
-
-
diff --git a/augur/tasks/start_tasks.py b/augur/tasks/start_tasks.py
index b0badb89d2..08c00a85ef 100644
--- a/augur/tasks/start_tasks.py
+++ b/augur/tasks/start_tasks.py
@@ -101,14 +101,14 @@ def primary_repo_collect_phase_gitlab(repo_git):
     logger = logging.getLogger(primary_repo_collect_phase_gitlab.__name__)
 
     jobs = group(
-        chain(collect_gitlab_merge_requests.si(repo_git), group(
-                #collect_merge_request_comments.s(repo_git),
-                #collect_merge_request_reviewers.s(repo_git),
-                collect_merge_request_metadata.s(repo_git),
-                collect_merge_request_commits.s(repo_git),
-                collect_merge_request_files.s(repo_git),
-                collect_gitlab_merge_request_events.si(repo_git),
-        )),
+        # chain(collect_gitlab_merge_requests.si(repo_git), group(
+        #         #collect_merge_request_comments.s(repo_git),
+        #         #collect_merge_request_reviewers.s(repo_git),
+        #         collect_merge_request_metadata.s(repo_git),
+        #         collect_merge_request_commits.s(repo_git),
+        #         collect_merge_request_files.s(repo_git),
+        #         collect_gitlab_merge_request_events.si(repo_git),
+        # )),
         chain(collect_gitlab_issues.si(repo_git), group(
             #collect_gitlab_issue_comments.s(repo_git),
             collect_gitlab_issue_events.si(repo_git),
@@ -339,7 +339,7 @@ def retry_errored_repos():
     #TODO: Isaac needs to normalize the statuses to be abstract in the
     #collection_status table once augur dev is less unstable.
     with DatabaseSession(logger,engine) as session:
-        query = s.sql.text(f"""UPDATE repo SET secondary_staus = {CollectionState.PENDING.value}"""
+        query = s.sql.text(f"""UPDATE repo SET secondary_status = {CollectionState.PENDING.value}"""
             f""" WHERE secondary_status = '{CollectionState.ERROR.value}' ;"""
             f"""UPDATE repo SET core_status = {CollectionState.PENDING.value}"""
             f""" WHERE core_status = '{CollectionState.ERROR.value}' ;"""
diff --git a/augur/tasks/util/AugurUUID.py b/augur/tasks/util/AugurUUID.py
index 5dfabc8ac4..ae8f05f124 100644
--- a/augur/tasks/util/AugurUUID.py
+++ b/augur/tasks/util/AugurUUID.py
@@ -129,6 +129,19 @@ class GithubUUID(AugurUUID):
     def __init__(self):
         super().__init__(platform = 1)
 
+class GitlabUUID(AugurUUID):
+    struct = {
+        "platform": {"start": 0, "size": 1},
+        "user": {"start": 1, "size": 4},
+        "repo": {"start": 5, "size": 3},
+        "issue": {"start": 8, "size": 4},
+        "event": {"start": 12, "size": 4},
+        "metadata": {"start": 12, "size": 4}
+    }
+
+    def __init__(self):
+        super().__init__(platform = 2)
+
 class UnresolvableUUID(GithubUUID):
     def __init__(self):
         super(GithubUUID, self).__init__(platform = 0)
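
GitlabUUID packs the GitLab user id into bytes 1-4 of a 16-byte identifier, with byte 0 marking the platform (2 for GitLab); note that "event" and "metadata" share bytes 12-15 in this layout. A hedged sketch of how the data_parse change composes a contributor id — the user id below is made up, and the exact byte-packing semantics live in AugurUUID internals not shown in this patch:

    from augur.tasks.util.AugurUUID import GitlabUUID

    cntrb_id = GitlabUUID()       # platform byte (offset 0) is set to 2
    cntrb_id["user"] = 1234567    # hypothetical GitLab user id -> bytes 1-4
    print(cntrb_id.to_UUID())     # 16-byte UUID built from the fields above
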

From f8eed39d05a22fe273e549eca8fa2f3f17db9c8c Mon Sep 17 00:00:00 2001
From: Ulincsys
Date: Sun, 18 Feb 2024 20:31:55 -0600
Subject: [PATCH 04/76] Update settings page

Signed-off-by: Ulincsys
---
 augur/static/css/dashboard.css  |  22 +-
 augur/templates/new_settings.j2 | 347 ---------------
 augur/templates/settings.j2     | 718 +++++++++++++++++++-------------
 augur/templates/settings_old.j2 | 140 -------
 4 files changed, 447 insertions(+), 780 deletions(-)
 delete mode 100644 augur/templates/new_settings.j2
 delete mode 100644 augur/templates/settings_old.j2

diff --git a/augur/static/css/dashboard.css b/augur/static/css/dashboard.css
index 08b98b3785..ef111c32a4 100644
--- a/augur/static/css/dashboard.css
+++ b/augur/static/css/dashboard.css
@@ -26,12 +26,30 @@ body {
 }
 
 .nav-pills .nav-link.active, .nav-pills .show > .nav-link {
-    background-color: var(--color-accent);
+    background-color: var(--color-accent)
 }
 
 .dashboard-sidebar {
-    width: 280px;
     background-color: var(--color-bg-light) !important;
+    color: var(--color-fg) !important;
+    max-height: 100vh;
+}
+
+.nav-link {
+    color: var(--color-fg);
+}
+
+.nav-pills li:has(a:not(.active)) :hover {
+    color: var(--color-notice);
+}
+
+.nav-pills li {
+    width: 100%;
+}
+
+.nav-pills li a {
+    padding-left: 10px !important;
+    padding-right: 10px !important;
 }
 
 .dashboard-form-control {
diff --git a/augur/templates/new_settings.j2 b/augur/templates/new_settings.j2
deleted file mode 100644
index 74a14ed575..0000000000
--- a/augur/templates/new_settings.j2
+++ /dev/null
@@ -1,347 +0,0 @@
[The 347 deleted template lines are omitted here: the markup did not survive extraction. Recoverable fragments show a "Settings - Augur View" page with a Profile section (user ID display and a Delete Account action) and an Update Password form.]
diff --git a/augur/templates/settings.j2 b/augur/templates/settings.j2
index 31230897c4..c75b6522ad 100644
--- a/augur/templates/settings.j2
+++ b/augur/templates/settings.j2
@@ -27,294 +27,385 @@
[The settings.j2 hunk, which rewrites roughly 294 template lines into 385, is truncated here; only garbled fragments of its markup survived extraction.]