Merge pull request #2747 from chaoss/dev
Release
sgoggins committed Mar 19, 2024
2 parents 9d69d7b + ffbac7b commit f919e92
Showing 154 changed files with 2,492 additions and 1,929 deletions.
5 changes: 3 additions & 2 deletions README.md
@@ -1,4 +1,4 @@
-# Augur NEW Release v0.62.6
+# Augur NEW Release v0.63.0

Augur is primarily a data engineering tool that makes it possible for data scientists to gather open source software community data. Less data carpentry for everyone else!
The primary way of looking at Augur data is through [8Knot](https://github.com/oss-aspen/8knot) ... A public instance of 8Knot is available at https://metrix.chaoss.io ... That is tied to a public instance of Augur at https://ai.chaoss.io
@@ -10,7 +10,8 @@ The primary way of looking at Augur data is through [8Knot](https://github.com/o
## NEW RELEASE ALERT!
### [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md)

-Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.62.6
+Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.63.0

- The `main` branch is a stable version of our new architecture, which features:
- Dramatic improvement in the speed of large scale data collection (100,000+ repos). All data is obtained for 100k+ repos within 2 weeks.
- A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard
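As a rough illustration of the queue-based collection architecture described in the README excerpt above (not code from this commit; the module name, broker URL, and task are placeholders), a Celery application backed by Redis looks roughly like this:

```python
# Illustrative only: a minimal Celery app using Redis as broker and result backend.
# The module path, broker URL, and task below are placeholders, not Augur's actual setup.
from celery import Celery

app = Celery(
    "collection_example",
    broker="redis://localhost:6379/0",
    backend="redis://localhost:6379/0",
)

@app.task
def collect_repo(repo_git_url: str) -> str:
    # A real collection task would gather data for the repository here.
    return f"collected {repo_git_url}"

# Workers would be started with, e.g.:   celery -A collection_example worker
# and monitored with Flower:             celery -A collection_example flower
```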
63 changes: 32 additions & 31 deletions augur/api/gunicorn_conf.py
@@ -1,47 +1,48 @@
# from augur import ROOT_AUGUR_DIRECTORY
import multiprocessing
import logging
import os
from pathlib import Path
from glob import glob
import shutil

-from augur.application.db.session import DatabaseSession
-from augur.application.config import AugurConfig
+from augur.application.db.lib import get_value, get_section
+from augur.application.db import dispose_database_engine

logger = logging.getLogger(__name__)
-with DatabaseSession(logger) as session:
-
-    augur_config = AugurConfig(logger, session)

# ROOT_AUGUR_DIRECTORY = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

# base_log_dir = ROOT_AUGUR_DIRECTORY + "/logs/"

# Path(base_log_dir).mkdir(exist_ok=True)

workers = multiprocessing.cpu_count() * 2 + 1
umask = 0o007
reload = True
reload_extra_files = glob(str(Path.cwd() / '**/*.j2'), recursive=True)

# set the log location for gunicorn
-logs_directory = augur_config.get_value('Logging', 'logs_directory')
+logs_directory = get_value('Logging', 'logs_directory')
accesslog = f"{logs_directory}/gunicorn.log"
errorlog = f"{logs_directory}/gunicorn.log"

-ssl_bool = augur_config.get_value('Server', 'ssl')
+ssl_bool = get_value('Server', 'ssl')

if ssl_bool is True:

-    workers = int(augur_config.get_value('Server', 'workers'))
-    bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port"))
-    timeout = int(augur_config.get_value('Server', 'timeout'))
-    certfile = str(augur_config.get_value('Server', 'ssl_cert_file'))
-    keyfile = str(augur_config.get_value('Server', 'ssl_key_file'))
+    workers = int(get_value('Server', 'workers'))
+    bind = '%s:%s' % (get_value("Server", "host"), get_value("Server", "port"))
+    timeout = int(get_value('Server', 'timeout'))
+    certfile = str(get_value('Server', 'ssl_cert_file'))
+    keyfile = str(get_value('Server', 'ssl_key_file'))

else:
-    workers = int(augur_config.get_value('Server', 'workers'))
-    bind = '%s:%s' % (augur_config.get_value("Server", "host"), augur_config.get_value("Server", "port"))
-    timeout = int(augur_config.get_value('Server', 'timeout'))
+    workers = int(get_value('Server', 'workers'))
+    bind = '%s:%s' % (get_value("Server", "host"), get_value("Server", "port"))
+    timeout = int(get_value('Server', 'timeout'))
+
+def worker_exit(server, worker):
+    print("Stopping gunicorn worker process")
+    dispose_database_engine()
1 change: 0 additions & 1 deletion augur/api/metrics/README.md
@@ -14,7 +14,6 @@ import datetime
import sqlalchemy as s
import pandas as pd
from augur.api.util import register_metric
-from augur.application.db.engine import engine
```
3. Defining the function
1. Add the decorator @register_metric to the function
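For orientation, here is a minimal sketch of a metric written against the updated conventions documented above (the `@register_metric` decorator plus the Flask-managed engine used throughout the metric modules below). The function name, SQL, and column names are illustrative placeholders, not part of this commit:

```python
import datetime

import sqlalchemy as s
import pandas as pd
from flask import current_app

from augur.api.util import register_metric


@register_metric()
def example_commit_totals(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None):
    """Illustrative metric only: the SQL below is a placeholder, not code from this commit."""
    if not begin_date:
        begin_date = '1970-1-1 00:00:01'
    if not end_date:
        end_date = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    example_sql = s.sql.text("""
        SELECT repo_id, COUNT(*) AS commit_count
        FROM commits
        WHERE repo_id = :repo_id
          AND cmt_author_date BETWEEN :begin_date AND :end_date
        GROUP BY repo_id
    """)

    # The database engine is now reached through the Flask app (current_app.engine)
    # instead of a module-level `engine` import.
    with current_app.engine.connect() as conn:
        results = pd.read_sql(example_sql, conn, params={
            'repo_id': repo_id, 'begin_date': begin_date, 'end_date': end_date})
    return results
```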
18 changes: 9 additions & 9 deletions augur/api/metrics/commit.py
@@ -6,9 +6,9 @@
import datetime
import sqlalchemy as s
import pandas as pd
-from augur.api.util import register_metric
+from flask import current_app

-from ..server import engine
+from augur.api.util import register_metric

@register_metric()
def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, period='month'):
@@ -90,7 +90,7 @@ def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, peri
"""
)

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committersSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date, 'period':period})

@@ -168,7 +168,7 @@ def annual_commit_count_ranked_by_new_repo_in_repo_group(repo_group_id, repo_id=
ORDER BY YEAR ASC
""".format(table, period))

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(cdRgNewrepRankedCommitsSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date})
return results
@@ -267,7 +267,7 @@ def annual_commit_count_ranked_by_repo_in_repo_group(repo_group_id, repo_id=None
LIMIT 10
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(cdRgTpRankedCommitsSQL, conn, params={ "repo_group_id": repo_group_id,
"repo_id": repo_id})
return results
@@ -299,7 +299,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
@@ -312,7 +312,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_id': repo_id})

@@ -339,7 +339,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
@@ -359,7 +359,7 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_id': repo_id})

20 changes: 10 additions & 10 deletions augur/api/metrics/contributor.py
@@ -6,10 +6,10 @@
import datetime
import sqlalchemy as s
import pandas as pd
+from flask import current_app

from augur.api.util import register_metric
import uuid

-from ..server import engine

@register_metric()
def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None):
@@ -125,7 +125,7 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end
ORDER BY total DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
@@ -212,7 +212,7 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end
ORDER BY total DESC
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results
@@ -283,7 +283,7 @@ def contributors_new(repo_group_id, repo_id=None, period='day', begin_date=None,
GROUP BY date, repo.repo_id, repo_name
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsNewSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
@@ -333,7 +333,7 @@ def contributors_new(repo_group_id, repo_id=None, period='day', begin_date=None,
GROUP BY date, repo.repo_id, repo_name
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsNewSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results
@@ -355,7 +355,7 @@ def lines_changed_by_author(repo_group_id, repo_id=None):
GROUP BY commits.repo_id, date_trunc('week', cmt_author_date::date), cmt_author_affiliation, cmt_author_email, repo_name
ORDER BY date_trunc('week', cmt_author_date::date) ASC;
""")
-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(linesChangedByAuthorSQL, conn, params={"repo_id": repo_id})
return results
else:
@@ -367,7 +367,7 @@ def lines_changed_by_author(repo_group_id, repo_id=None):
GROUP BY repo_id, date_trunc('week', cmt_author_date::date), cmt_author_affiliation, cmt_author_email
ORDER BY date_trunc('week', cmt_author_date::date) ASC;
""")
-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(linesChangedByAuthorSQL, conn, params={"repo_group_id": repo_group_id})
return results

@@ -426,7 +426,7 @@ def contributors_code_development(repo_group_id, repo_id=None, period='all', beg
GROUP BY a.email, a.repo_id, repo_name
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
@@ -462,7 +462,7 @@ def contributors_code_development(repo_group_id, repo_id=None, period='all', beg
ORDER BY commits desc, email
""")

-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results
13 changes: 7 additions & 6 deletions augur/api/metrics/deps.py
@@ -5,10 +5,11 @@

import sqlalchemy as s
import pandas as pd
-from augur.api.util import register_metric
import datetime
+from flask import current_app

+from augur.api.util import register_metric

-from ..server import engine

@register_metric()
def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=None):
@@ -46,8 +47,8 @@ def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=No
AND repo_dependencies.repo_id = :repo_id
""")

-with engine.connect() as conn:
-    results = pd.read_sql(depsSQL, conn)
+with current_app.engine.connect() as conn:
+    results = pd.read_sql(depsSQL, conn, params={'repo_id': repo_id})

else:

@@ -71,8 +72,8 @@ def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=No
AND repo.repo_group_id = :repo_group_id
""")

-with engine.connect() as conn:
-    results = pd.read_sql(depsSQL, conn)
+with current_app.engine.connect() as conn:
+    results = pd.read_sql(depsSQL, conn, params={'repo_group_id': repo_group_id})
return results


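The deps.py change above also starts passing bind parameters to pandas, so the `:repo_id` / `:repo_group_id` placeholders in the SQL are actually bound instead of being left dangling. A self-contained sketch of that pattern, using a throwaway SQLite table rather than Augur's schema:

```python
# Illustrative sketch of binding named parameters with SQLAlchemy + pandas.
# The in-memory SQLite engine and the repo_dependencies rows here are placeholders.
import pandas as pd
import sqlalchemy as s

engine = s.create_engine("sqlite:///:memory:")

with engine.connect() as conn:
    conn.execute(s.text("CREATE TABLE repo_dependencies (repo_id INTEGER, dep_name TEXT)"))
    conn.execute(s.text("INSERT INTO repo_dependencies VALUES (1, 'requests'), (2, 'flask')"))

    deps_sql = s.text("SELECT * FROM repo_dependencies WHERE repo_id = :repo_id")

    # Without params={...}, the :repo_id placeholder is never bound and the query errors out;
    # passing params binds the value safely instead of interpolating it into the SQL string.
    results = pd.read_sql(deps_sql, conn, params={"repo_id": 1})

print(results)
```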
4 changes: 2 additions & 2 deletions augur/api/metrics/insight.py
@@ -7,7 +7,7 @@
import pandas as pd
from augur.api.util import register_metric

-from ..server import engine
+from flask import current_app

@register_metric(type="repo_group_only")
def top_insights(repo_group_id, num_repos=6):
@@ -29,6 +29,6 @@ def top_insights(repo_group_id, num_repos=6):
LIMIT :num_repos
)
""")
-with engine.connect() as conn:
+with current_app.engine.connect() as conn:
results = pd.read_sql(topInsightsSQL, conn, params={'repo_group_id': repo_group_id, 'num_repos': num_repos})
return results
