test: improve testing (DEV-152) (#110)
* improve code structure

* improve and activate user tests

* remove code smells in user tests

* remove remnants of id_to_iri feature

* improve user test

* fix typo in Makefile

* Update test_user.py

* fix failing list tests

* fix typo

* add more comments to lists test data

* fix failing list node test

* improve test_listnode

* improve test_connection

* improve test_group

* improve test_ontology

* improve test_project

* improve test_propertyclass

* improve test_resource

* improve test_resourceclass

* improve test_tools

* move unit tests to separate folder

* Update Makefile

* update GitHub CI actions

* Update test.yml

* fix code smells in test_langstring

* fix failing GitHub action

* improve code

* remove print statements
irinaschubert committed Nov 4, 2021
1 parent c5079f7 commit 2e9af2a
Showing 33 changed files with 1,381 additions and 1,363 deletions.
70 changes: 32 additions & 38 deletions .github/workflows/daily-test.yml
@@ -5,41 +5,35 @@ on:
- cron: '0 7 * * *'

jobs:
test-integration:
name: Integration Tests
runs-on: ubuntu-latest
steps:
# run tests
- name: Checkout source
uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Set up JDK
uses: joschi/setup-jdk@v2
with:
java-version: '11' # The OpenJDK version to make available on the path
architecture: 'x64' # defaults to 'x64'
- name: Set up Node
uses: actions/setup-node@v1
with:
node-version: '12'
- name: Install bazel
run: |
sudo apt-get install ca-certificates-java expect
npm install --global @bazel/bazelisk
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Upgrade pip and install requirements
run: |
python3 -m pip install --upgrade pip
make upgrade-dist-tools
make install-requirements
- name: run test-integration
run: |
make install
make test
- name: build docs
run: |
make build-docs
tests:
name: daily-tests
runs-on: ubuntu-latest
steps:
- name: Checkout source
uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Set up JDK
uses: joschi/setup-jdk@v2
with:
java-version: '11' # The OpenJDK version to make available on the path
architecture: 'x64' # defaults to 'x64'
- name: Set up Node
uses: actions/setup-node@v1
with:
node-version: '12'
- name: Install dependencies
run: |
sudo apt-get install ca-certificates-java expect
npm install --global @bazel/bazelisk
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Install dependencies
run: |
make upgrade-dist-tools
make install-requirements
make install
- name: Run e2e tests
run: make test-end-to-end
25 changes: 10 additions & 15 deletions .github/workflows/test.yml
@@ -6,11 +6,13 @@ on:
types: [opened]

jobs:
test-integration:
name: Integration Tests
tests:
name: tests
runs-on: ubuntu-latest
strategy:
matrix:
target: [ 'test-end-to-end', 'test-unittests', 'docs-build' ]
steps:
# run tests
- name: Checkout source
uses: actions/checkout@v2
with:
@@ -34,15 +36,8 @@ jobs:
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install -r requirements.txt
- name: run test-integration
run: |
make upgrade-dist-tools
make install-requirements
make install
make test
- name: build docs
run: |
make install-requirements
make build-docs
make upgrade-dist-tools
make install-requirements
make install
- name: Run tests
run: make ${{ matrix.target }}
61 changes: 35 additions & 26 deletions Makefile
@@ -6,70 +6,79 @@ CURRENT_DIR := $(shell dirname $(realpath $(firstword $(MAKEFILE_LIST))))
include vars.mk

#################################
# Integration test targets
# Make targets for dsp-tools
#################################

# Clones the knora-api git repository
.PHONY: clone-knora-stack
clone-knora-stack:
@git clone --branch main --single-branch --depth 1 https://github.com/dasch-swiss/knora-api.git $(CURRENT_DIR)/.tmp/knora-stack
.PHONY: clone-dsp-repo
clone-dsp-repo: ## clone the dsp-api git repository
@git clone --branch main --single-branch --depth 1 https://github.com/dasch-swiss/dsp-api.git $(CURRENT_DIR)/.tmp/dsp-stack

.PHONY: knora-stack
knora-stack: ## runs the knora-stack
$(MAKE) -C $(CURRENT_DIR)/.tmp/knora-stack env-file
$(MAKE) -C $(CURRENT_DIR)/.tmp/knora-stack stack-down-delete-volumes
$(MAKE) -C $(CURRENT_DIR)/.tmp/knora-stack init-db-test
$(MAKE) -C $(CURRENT_DIR)/.tmp/knora-stack stack-up
$(MAKE) -C $(CURRENT_DIR)/.tmp/knora-stack stack-logs-api-no-follow
.PHONY: dsp-stack
dsp-stack: ## run the dsp-stack (deletes existing volumes first)
$(MAKE) -C $(CURRENT_DIR)/.tmp/dsp-stack env-file
$(MAKE) -C $(CURRENT_DIR)/.tmp/dsp-stack stack-down-delete-volumes
$(MAKE) -C $(CURRENT_DIR)/.tmp/dsp-stack init-db-test
$(MAKE) -C $(CURRENT_DIR)/.tmp/dsp-stack stack-up
$(MAKE) -C $(CURRENT_DIR)/.tmp/dsp-stack stack-logs-api-no-follow

.PHONY: dist
dist: ## generate distribution package
python3 setup.py sdist bdist_wheel

.PHONY: upload
upload: ## upload distribution package to PyPi
upload: ## upload distribution package to PyPI
python3 -m twine upload dist/*

.PHONY: upgrade-dist-tools
upgrade-dist-tool: ## upgrade packages necessary for testing, building, packaging and uploading to PyPi
python3 -m pip install --upgrade pip setuptools wheel tqdm twine pytest mkdocs mkdocs
upgrade-dist-tools: ## upgrade packages necessary for testing, building, packaging and uploading to PyPI
python3 -m pip install --upgrade pip setuptools wheel tqdm twine pytest mkdocs

.PHONY: build-docs
build-docs: ## build docs into the local 'site' folder
.PHONY: docs-build
docs-build: ## build docs into the local 'site' folder
mkdocs build

.PHONY: serve-docs
serve-docs: ## serve docs for local viewing
.PHONY: docs-serve
docs-serve: ## serve docs for local viewing
mkdocs serve

.PHONY: publish-docs
publish-docs: ## build and publish docs to Github Pages
.PHONY: docs-publish
docs-publish: ## build and publish docs to GitHub Pages
mkdocs gh-deploy

.PHONY: install-requirements
install-requirements: ## install requirements
python3 -m pip install --upgrade pip
pip3 install -r requirements.txt
pip3 install -r docs/requirements.txt

.PHONY: install
install: ## install from source
install: ## install from source (runs setup.py)
pip3 install .

.PHONY: test
test: clean local-tmp clone-knora-stack knora-stack ## runs all tests
test: clean local-tmp clone-dsp-repo dsp-stack ## run all tests
# to run only one test, replace //test/... with p.ex. //test/e2e:test_tools
bazel test --test_summary=detailed --test_output=all //test/...

.PHONY: test-end-to-end
test-end-to-end: clean local-tmp clone-dsp-repo dsp-stack ## run e2e tests
bazel test --test_summary=detailed --test_output=all //test/e2e/...

.PHONY: test-unittests
test-unittests: ## run unit tests
bazel test --test_summary=detailed --test_output=all //test/unittests/...

.PHONY: local-tmp
local-tmp:
local-tmp: ## create local .tmp folder
@mkdir -p $(CURRENT_DIR)/.tmp

.PHONY: clean
clean: ## cleans the project directory
clean: ## clean local project directories
@rm -rf $(CURRENT_DIR)/.tmp
@rm -rf dist/ build/ site/ knora.egg-info/

.PHONY: help
help: ## this help
help: ## show this help
@awk 'BEGIN {FS = ":.*?## "} /^[a-zA-Z_-]+:.*?## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST) | sort

.PHONY: run
4 changes: 2 additions & 2 deletions knora/dsplib/models/langstring.py
@@ -65,7 +65,7 @@ def mymapper(p: Tuple[Union[Languages, str], str]) -> Tuple[Languages, str]:
elif isinstance(p[0], Languages):
lang = p[0]
else:
raise BaseError("No a valid language definition!")
raise BaseError("Not a valid language definition!")
return lang, p[1]

if initvalue is None:
@@ -81,7 +81,7 @@ def mymapper(p: Tuple[Union[Languages, str], str]) -> Tuple[Languages, str]:
self._simplestring = initvalue._simplestring
self._langstrs = initvalue._langstrs
else:
raise BaseError("No a valid language definition!")
raise BaseError("Not a valid language definition!")

def __getitem__(self, key: Optional[Union[Languages, str]] = None) -> str:
#
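The two corrected error messages above are raised by LangString's constructor, which accepts a plain string, another LangString, or a dict that maps languages to values. A minimal sketch of that behaviour, assuming the import paths follow the file layout shown above and that both Languages members and language-code strings such as 'de' are accepted as dict keys (the enum member names are an assumption):

from knora.dsplib.models.helpers import BaseError
from knora.dsplib.models.langstring import Languages, LangString

# a plain string becomes a language-independent value
simple = LangString("a plain string value")

# a dict maps languages to values
labels = LangString({Languages.EN: "An English label", 'de': "Ein deutsches Label"})

# any key that is neither a Languages member nor a recognised language code is rejected
try:
    LangString({42: "not a language key"})
except BaseError:
    print("rejected: Not a valid language definition!")
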
6 changes: 3 additions & 3 deletions knora/dsplib/models/listnode.py
@@ -7,7 +7,7 @@

from .connection import Connection
from .helpers import Actions, BaseError
from .langstring import Languages, LangStringParam, LangString
from .langstring import Languages, LangString
from .model import Model
from .project import Project

@@ -148,8 +148,8 @@ def __init__(self,
con: Connection,
id: Optional[str] = None,
project: Optional[Union[Project, str]] = None,
label: LangStringParam = None,
comments: LangStringParam = None,
label: LangString = None,
comments: LangString = None,
name: Optional[str] = None,
parent: Optional[Union['ListNode', str]] = None,
isRootNode: Optional[bool] = None,
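With the signature change above, label and comments are now passed as LangString objects directly. A hedged sketch of creating a list root node against a locally running stack; the server URL, the test credentials, the project IRI and the chained create() call are assumptions based on the usual dsplib Model pattern and are not part of this diff:

from knora.dsplib.models.connection import Connection
from knora.dsplib.models.langstring import Languages, LangString
from knora.dsplib.models.listnode import ListNode

con = Connection('http://0.0.0.0:3333')   # assumed local DSP stack
con.login('root@example.com', 'test')     # assumed test credentials

root_node = ListNode(
    con=con,
    project='http://rdfh.ch/projects/0001',                         # hypothetical project IRI
    name='mylist',
    label=LangString({Languages.EN: 'My example list'}),
    comments=LangString({Languages.EN: 'A list used in the tests'}),
).create()                                # assumed to return the created node
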
2 changes: 1 addition & 1 deletion knora/dsplib/models/project.py
@@ -122,7 +122,7 @@ def __init__(self,
shortcode: Optional[str] = None,
shortname: Optional[str] = None,
longname: Optional[str] = None,
description: LangStringParam = None,
description: LangString = None,
keywords: Optional[Set[str]] = None,
ontologies: Optional[Set[str]] = None,
selfjoin: Optional[bool] = None,
3 changes: 1 addition & 2 deletions knora/dsplib/models/user.py
@@ -146,7 +146,7 @@ class User(Model):
_rm_from_project: Dict[str, bool]
_add_to_group: Set[str]
_rm_from_group: Set[str]
_change_admin: Set[str]
_change_admin: Dict[str, bool]

def __init__(self,
con: Connection,
@@ -641,7 +641,6 @@ def update(self, requesterPassword: Optional[str] = None) -> Any:
'/admin/users/iri/' + quote_plus(self._id) + '/project-admin-memberships/' + quote_plus(p[0]))

for p in self._add_to_group:
print('/admin/users/iri/' + quote_plus(self._id) + '/group-memberships/' + quote_plus(p))
result = self._con.post('/admin/users/iri/' + quote_plus(self._id) + '/group-memberships/' + quote_plus(p))
for p in self._rm_from_group:
result = self._con.delete(
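The update() excerpt above assembles the admin API routes by percent-encoding the user and group IRIs with quote_plus. A small standalone illustration of that pattern; the IRIs themselves are hypothetical:

from urllib.parse import quote_plus

user_iri = 'http://rdfh.ch/users/some-user-uuid'      # hypothetical user IRI
group_iri = 'http://rdfh.ch/groups/0001/some-group'   # hypothetical group IRI

# IRIs contain ':' and '/', so they must be encoded before being embedded in the route
endpoint = '/admin/users/iri/' + quote_plus(user_iri) + '/group-memberships/' + quote_plus(group_iri)
print(endpoint)
# /admin/users/iri/http%3A%2F%2Frdfh.ch%2Fusers%2Fsome-user-uuid/group-memberships/http%3A%2F%2Frdfh.ch%2Fgroups%2F0001%2Fsome-group
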
28 changes: 15 additions & 13 deletions knora/dsplib/utils/excel_to_json_lists.py
@@ -5,7 +5,7 @@
import os
import re
import unicodedata
from typing import List
from typing import List, Dict

import jsonschema
from jsonschema import validate
@@ -39,8 +39,8 @@ def get_values_from_excel(excelfiles: List[str], base_file: str, parentnode: {},
cell = worksheet.cell(column=col, row=row)

if col > 1:
# append the cell value of the parent node (which is one value to the left of the actual cell) to the list of previous
# values
# append the cell value of the parent node (which is one value to the left of the actual cell) to the list of
# previous values
preval.append(worksheet.cell(column=col - 1, row=row).value)

while cell.value:
@@ -126,8 +126,8 @@ def make_json_list_from_excel(rootnode: {}, excelfiles: List[str]) -> None:
startrow = 1
startcol = 1

# Check if English file is available and take it as base file, take last one from list of Excel files if English is not
# available. The node names are later derived from the labels of the base file.
# Check if English file is available and take it as base file, take last one from list of Excel files if English
# is not available. The node names are later derived from the labels of the base file.
base_file = ''

for filename in excelfiles:
@@ -206,13 +206,14 @@ def check_language_code(lang_code: str) -> bool:
return False


def make_root_node_from_args(excelfiles: List[str], listname_from_args: str) -> dict:
def make_root_node_from_args(excelfiles: List[str], listname_from_args: str, comments: Dict[str, str]) -> dict:
"""
Creates the root node for the JSON list
Args:
excelfiles: List of Excel files (names) to be checked
listname_from_args: Listname from arguments provided by the user via the command line
comments: Comments provided by the ontology
Returns:
dict: The root node of the list as dictionary (JSON)
@@ -245,7 +246,7 @@ def make_root_node_from_args(excelfiles: List[str], listname_from_args: str) ->
if listname_from_args:
listname = listname_from_args

rootnode = {'name': listname, 'labels': rootnode_labels_dict}
rootnode = {'name': listname, 'labels': rootnode_labels_dict, 'comments': comments}

return rootnode

@@ -274,13 +275,14 @@ def validate_list_with_schema(json_list: str) -> bool:
return True


def prepare_list_creation(excelfolder: str, listname: str):
def prepare_list_creation(excelfolder: str, listname: str, comments: dict):
"""
Gets the excelfolder parameter and checks the validity of the files. It then makes the root node for the list.
Args:
excelfolder: path to the folder containing the Excel file(s)
listname: name of the list to be created
comments: comments for the list to be created
Returns:
rootnode (dict): The rootnode of the list as a dictionary
@@ -296,7 +298,7 @@ def prepare_list_creation(excelfolder: str, listname: str):
# check if the given folder parameter is actually a folder
if not os.path.isdir(excelfolder):
print(excelfolder, 'is not a directory.')
exit()
exit(1)

# create a list with all excel files from the path provided by the user
excel_files = [filename for filename in glob.iglob(f'{excelfolder}/*.xlsx') if
@@ -308,10 +310,10 @@ def list_excel2json(listname: str, excelfolder: str, outfile: str):
print(file)
if not os.path.isfile(file):
print(file, 'is not a valid file.')
exit()
exit(1)

# create root node of list
rootnode = make_root_node_from_args(excel_files, listname)
rootnode = make_root_node_from_args(excel_files, listname, comments)

return rootnode, excel_files

@@ -328,8 +330,8 @@ def list_excel2json(listname: str, excelfolder: str, outfile: str):
Return:
None
"""
# get the Excel files from the folder and crate the rootnode of the list
rootnode, excel_files = prepare_list_creation(excelfolder, listname)
# get the Excel files from the folder and create the rootnode of the list
rootnode, excel_files = prepare_list_creation(excelfolder, listname, comments={})

# create the list from the Excel files
make_json_list_from_excel(rootnode, excel_files)
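The public entry point shown above, list_excel2json(listname, excelfolder, outfile), now forwards an empty comments dict through prepare_list_creation() to make_root_node_from_args(). A hedged usage sketch; the folder and file names are hypothetical, and the expectation of one Excel file per language (with English preferred as the base file) follows the comments in the diff:

from knora.dsplib.utils.excel_to_json_lists import list_excel2json

# 'lists' is assumed to contain one .xlsx per language; if an English file is present,
# it is used as the base file from which the node names are derived
list_excel2json(
    listname='colors',           # becomes the 'name' of the root node
    excelfolder='lists',         # hypothetical folder containing the Excel files
    outfile='colors_list.json',  # hypothetical path for the generated JSON list
)
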
