Skip to content

Commit

Permalink
Merge pull request #2577 from chaoss/dev
Browse files Browse the repository at this point in the history
Version 0.60.2 Release
  • Loading branch information
sgoggins committed Nov 1, 2023
2 parents 4434e98 + afbcb3a commit bf08907
Show file tree
Hide file tree
Showing 51 changed files with 802 additions and 410 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -1,4 +1,4 @@
# Augur NEW Release v0.60.0
# Augur NEW Release v0.60.2

[![first-timers-only](https://img.shields.io/badge/first--timers--only-friendly-blue.svg?style=flat-square)](https://www.firsttimersonly.com/) We follow the [First Timers Only](https://www.firsttimersonly.com/) philosophy of tagging issues for first timers only, and walking one newcomer through the resolution process weekly. [You can find these issues tagged with "first timers only" on our issues list.](https://github.com/chaoss/augur/labels/first-timers-only).

Expand All @@ -7,7 +7,7 @@
## NEW RELEASE ALERT!
### [If you want to jump right in, updated docker build/compose and bare metal installation instructions are available here](docs/new-install.md)

Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.60.0
Augur is now releasing a dramatically improved new version to the main branch. It is also available here: https://github.com/chaoss/augur/releases/tag/v0.60.2
- The `main` branch is a stable version of our new architecture, which features:
- Dramatic improvement in the speed of large scale data collection (100,000+ repos). All data is obtained for 100k+ repos within 2 weeks.
- A new job management architecture that uses Celery and Redis to manage queues, and enables users to run a Flower job monitoring dashboard
Expand Down
3 changes: 2 additions & 1 deletion augur/api/metrics/README.md
Expand Up @@ -26,7 +26,8 @@ from augur.application.db.engine import engine
4. Define any queries with the structure shown below
```py
repo_sql = s.sql.text(""" SELECT repo.repo_name FROM repo WHERE repo.repo_id = :repo_id """)
results = pd.read_sql(repo_sql, engine, params={'repo_id': repo_id})
with engine.connect() as conn:
results = pd.read_sql(repo_sql, conn, params={'repo_id': repo_id})
```
5. Return either a pandas dataframe, dict, or json.
- Note: If you return a pandas dataframe or dict it will be automatically converted into json
Expand Down
35 changes: 21 additions & 14 deletions augur/api/metrics/commit.py
Expand Up @@ -90,8 +90,9 @@ def committers(repo_group_id, repo_id=None, begin_date=None, end_date=None, peri
"""
)

results = pd.read_sql(committersSQL, engine, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date, 'period':period})
with engine.connect() as conn:
results = pd.read_sql(committersSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date, 'period':period})

return results

Expand Down Expand Up @@ -167,8 +168,9 @@ def annual_commit_count_ranked_by_new_repo_in_repo_group(repo_group_id, repo_id=
ORDER BY YEAR ASC
""".format(table, period))

results = pd.read_sql(cdRgNewrepRankedCommitsSQL, engine, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(cdRgNewrepRankedCommitsSQL, conn, params={'repo_id': repo_id,
'repo_group_id': repo_group_id,'begin_date': begin_date, 'end_date': end_date})
return results

@register_metric()
Expand Down Expand Up @@ -265,8 +267,9 @@ def annual_commit_count_ranked_by_repo_in_repo_group(repo_group_id, repo_id=None
LIMIT 10
""")

results = pd.read_sql(cdRgTpRankedCommitsSQL, engine, params={ "repo_group_id": repo_group_id,
"repo_id": repo_id})
with engine.connect() as conn:
results = pd.read_sql(cdRgTpRankedCommitsSQL, conn, params={ "repo_group_id": repo_group_id,
"repo_id": repo_id})
return results

@register_metric()
Expand Down Expand Up @@ -296,8 +299,9 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

results = pd.read_sql(total_commits_SQL, engine,
params={'year': year, 'repo_group_id': repo_group_id})
with engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
total_commits_SQL = s.sql.text("""
SELECT SUM(patches)::int
Expand All @@ -308,8 +312,9 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY patches DESC) a
""")

results = pd.read_sql(total_commits_SQL, engine,
params={'year': year, 'repo_id': repo_id})
with engine.connect() as conn:
results = pd.read_sql(total_commits_SQL, conn,
params={'year': year, 'repo_id': repo_id})

if not results.iloc[0]['sum']:
return pd.DataFrame()
Expand All @@ -334,8 +339,9 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

results = pd.read_sql(committers_SQL, engine,
params={'year': year, 'repo_group_id': repo_group_id})
with engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_group_id': repo_group_id})
else:
committers_SQL = s.sql.text("""
SELECT
Expand All @@ -353,8 +359,9 @@ def top_committers(repo_group_id, repo_id=None, year=None, threshold=0.8):
ORDER BY commits DESC
""")

results = pd.read_sql(committers_SQL, engine,
params={'year': year, 'repo_id': repo_id})
with engine.connect() as conn:
results = pd.read_sql(committers_SQL, conn,
params={'year': year, 'repo_id': repo_id})

cumsum = 0
for i, row in results.iterrows():
Expand Down
36 changes: 22 additions & 14 deletions augur/api/metrics/contributor.py
Expand Up @@ -125,8 +125,9 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end
ORDER BY total DESC
""")

results = pd.read_sql(contributorsSQL, engine, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
contributorsSQL = s.sql.text("""
SELECT id::text AS user_id,
Expand Down Expand Up @@ -211,8 +212,9 @@ def contributors(repo_group_id, repo_id=None, period='day', begin_date=None, end
ORDER BY total DESC
""")

results = pd.read_sql(contributorsSQL, engine, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results

@register_metric()
Expand Down Expand Up @@ -281,8 +283,9 @@ def contributors_new(repo_group_id, repo_id=None, period='day', begin_date=None,
GROUP BY date, repo.repo_id, repo_name
""")

results = pd.read_sql(contributorsNewSQL, engine, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsNewSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
contributorsNewSQL = s.sql.text("""
SELECT date_trunc(:period, b.created_at::DATE) AS date, COUNT(id) AS new_contributors, repo.repo_id, repo_name
Expand Down Expand Up @@ -330,8 +333,9 @@ def contributors_new(repo_group_id, repo_id=None, period='day', begin_date=None,
GROUP BY date, repo.repo_id, repo_name
""")

results = pd.read_sql(contributorsNewSQL, engine, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsNewSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results

@register_metric()
Expand All @@ -351,7 +355,8 @@ def lines_changed_by_author(repo_group_id, repo_id=None):
GROUP BY commits.repo_id, date_trunc('week', cmt_author_date::date), cmt_author_affiliation, cmt_author_email, repo_name
ORDER BY date_trunc('week', cmt_author_date::date) ASC;
""")
results = pd.read_sql(linesChangedByAuthorSQL, engine, params={"repo_id": repo_id})
with engine.connect() as conn:
results = pd.read_sql(linesChangedByAuthorSQL, conn, params={"repo_id": repo_id})
return results
else:
linesChangedByAuthorSQL = s.sql.text("""
Expand All @@ -362,7 +367,8 @@ def lines_changed_by_author(repo_group_id, repo_id=None):
GROUP BY repo_id, date_trunc('week', cmt_author_date::date), cmt_author_affiliation, cmt_author_email
ORDER BY date_trunc('week', cmt_author_date::date) ASC;
""")
results = pd.read_sql(linesChangedByAuthorSQL, engine, params={"repo_group_id": repo_group_id})
with engine.connect() as conn:
results = pd.read_sql(linesChangedByAuthorSQL, conn, params={"repo_group_id": repo_group_id})
return results

@register_metric()
Expand Down Expand Up @@ -420,8 +426,9 @@ def contributors_code_development(repo_group_id, repo_id=None, period='all', beg
GROUP BY a.email, a.repo_id, repo_name
""")

results = pd.read_sql(contributorsSQL, engine, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_id': repo_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
else:
contributorsSQL = s.sql.text("""
SELECT
Expand Down Expand Up @@ -455,6 +462,7 @@ def contributors_code_development(repo_group_id, repo_id=None, period='all', beg
ORDER BY commits desc, email
""")

results = pd.read_sql(contributorsSQL, engine, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
with engine.connect() as conn:
results = pd.read_sql(contributorsSQL, conn, params={'repo_group_id': repo_group_id, 'period': period,
'begin_date': begin_date, 'end_date': end_date})
return results
7 changes: 5 additions & 2 deletions augur/api/metrics/deps.py
Expand Up @@ -6,6 +6,7 @@
import sqlalchemy as s
import pandas as pd
from augur.api.util import register_metric
import datetime

from ..server import engine

Expand Down Expand Up @@ -45,7 +46,8 @@ def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=No
AND repo_dependencies.repo_id = :repo_id
""")

results = pd.read_sql(depsSQL, engine)
with engine.connect() as conn:
results = pd.read_sql(depsSQL, conn)

else:

Expand All @@ -69,7 +71,8 @@ def deps(repo_group_id, repo_id=None, period='day', begin_date=None, end_date=No
AND repo.repo_group_id = :repo_group_id
""")

results = pd.read_sql(depsSQL, engine)
with engine.connect() as conn:
results = pd.read_sql(depsSQL, conn)
return results


Expand Down
3 changes: 2 additions & 1 deletion augur/api/metrics/insight.py
Expand Up @@ -29,5 +29,6 @@ def top_insights(repo_group_id, num_repos=6):
LIMIT :num_repos
)
""")
results = pd.read_sql(topInsightsSQL, engine, params={'repo_group_id': repo_group_id, 'num_repos': num_repos})
with engine.connect() as conn:
results = pd.read_sql(topInsightsSQL, conn, params={'repo_group_id': repo_group_id, 'num_repos': num_repos})
return results

0 comments on commit bf08907

Please sign in to comment.