"""Script to check that `requirements.txt` and `environment.yml` are synced.

This script requires `pyyaml` to read `environment.yml`. It checks that all
packages listed as dependencies in it are also present in one of
`requirements.txt` or `extra_libraries.txt`.

As there might be some discrepancy between package names in `pip` and `conda`,
one can add the name of the corresponding conda package as a comment on the
same line as a requirement in `requirements.txt` to ensure proper matching.
For instance, if one adds the following line in `requirements.txt`

> tensorflow-gpu  # tensorflow

it will match a dependency `tensorflow` in the `environment.yml`.
"""


def preprocess_pip_deps(lines):
    """Extract dependency names from the lines of a pip requirements file.

    Parameters
    ----------
    lines : list of str
        Raw lines read from a requirements file.

    Returns
    -------
    list of str
        Dependency names. Blank lines and full-line comments are skipped.
        When a requirement carries an inline comment, the comment content is
        used *instead of* the requirement itself: this lets one declare the
        matching conda package name (see module docstring). Several
        space-separated names in one comment yield several entries.
    """
    deps = []
    for raw in lines:
        entry = raw.strip()
        if not entry or entry.startswith('#'):
            continue
        # Use the inline comment (if any) to declare compat with the conda
        # install name. Two conda packages may be listed on the same line,
        # separated by a space.
        deps.extend(entry.split('#')[-1].strip().split())
    return deps


def assert_same_deps(deps_pip, deps_conda):
    """Check the two dependency lists match, with an explicit error message.

    Parameters
    ----------
    deps_pip : iterable of str
        Dependencies collected from the pip requirement files.
    deps_conda : iterable of str
        Dependencies declared in `environment.yml`.

    Raises
    ------
    AssertionError
        If any dependency is present in one list but not in the other.
    """
    # 'pip' is a conda-only dependency, required so conda can install the
    # pip-section packages; it has no pip-side counterpart.
    deps_pip = set(deps_pip)
    deps_conda = set(deps_conda) - {'pip'}
    # Requirements fetched via git need normalising: environment.yml uses
    # git+https:// URLs while requirements.txt may use git+git://.
    deps_pip = fix_req_set(deps_pip)
    deps_conda = fix_req_set(deps_conda)

    missing = deps_pip.symmetric_difference(deps_conda)

    assert not missing, (
        f"Missing dependency {deps_pip.difference(deps_conda)} in "
        f"`environment.yml` and dependencies "
        f"{deps_conda.difference(deps_pip)} in `requirements.txt`"
    )
    return


def fix_req_set(req_set: set):
    """Return a copy of ``req_set`` with git URL prefixes stripped.

    Parameters
    ----------
    req_set : set
        Set containing the requirement strings to normalise.

    Returns
    -------
    set
        Same requirements, with any leading 'git+git://' or 'git+https://'
        removed so the pip and conda spellings of a git requirement compare
        equal.
    """
    return {remove_git_prefix(req) for req in req_set}


def remove_git_prefix(req_name: str) -> str:
    """Strip a leading 'git+git://' or 'git+https://' from ``req_name``.

    Parameters
    ----------
    req_name : str
        Requirement string, possibly a git URL.

    Returns
    -------
    str
        ``req_name`` without its git prefix, or unchanged if no known
        prefix is present.
    """
    # More generally, one could use req_name[req_name.index('//') + 2:],
    # but that would do more than is stated in the docstring.
    # NOTE: slice by prefix length rather than str.replace — replace would
    # also delete a second occurrence of the prefix later in the string.
    for prefix in ('git+git://', 'git+https://'):
        if req_name.startswith(prefix):
            return req_name[len(prefix):]
    return req_name
+ if(req_name.startswith('git+git://')): + return req_name.replace('git+git://', '') + elif(req_name.startswith('git+https://')): + return req_name.replace('git+https://', '') + return req_name + + +if __name__ == '__main__': + + # Load deps from environment.yml + with open('environment.yml') as f: + conf = yaml.load(f, Loader=yaml.FullLoader) + + deps_conda = conf['dependencies'] + deps_conda = deps_conda[:-1] + deps_conda[-1]['pip'] + + deps_pip = [] + for requirement_file in ['requirements.txt', 'extra_libraries.txt']: + with open(requirement_file) as f: + deps_pip += preprocess_pip_deps(f.readlines()) + + assert_same_deps(deps_pip, deps_conda) diff --git a/.github/workflows/install.yml b/.github/workflows/install.yml new file mode 100644 index 0000000..12c22d5 --- /dev/null +++ b/.github/workflows/install.yml @@ -0,0 +1,53 @@ +name: Check Extra Install + +on: + push: + branches: + - '**' + create: + branches: + - 'master' + tags: + - '**' + +jobs: + pip_install: + name: Check extra install with pip + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + + - name: Pip install extra_libraries + run: pip install -r extra_libraries.txt + + conda_install: + name: Check extra install with conda + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - uses: s-weigand/setup-conda@v1 + - name: Install mamba + run: conda install -y -c conda-forge mamba + - name: Conda install environment.yml + run: mamba env create -f environment.yml -n test_env + + check_same_deps: + name: Check pip and conda install same env + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Setup Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: pip install pyyaml + - name: Check same requirements + run: python .github/workflows/check_deps.py diff --git a/.github/workflows/testing.yml
b/.github/workflows/testing.yml new file mode 100644 index 0000000..3d69825 --- /dev/null +++ b/.github/workflows/testing.yml @@ -0,0 +1,54 @@ +name: 'build' +on: + push: + branches: + - master + - main + pull_request: + branches: + - master + - main + +jobs: + build_conda: + name: RAMP tests + runs-on: "ubuntu-latest" + defaults: + run: + shell: bash -l {0} + steps: + - uses: actions/checkout@v2 + - uses: conda-incubator/setup-miniconda@v2 + with: + activate-environment: anaconda-client-env + environment-file: environment.yml + python-version: 3.8 + auto-activate-base: false + - run: | + export RAMP_TEST_MODE=true # to have download_data.py just download the dummy dataset + python download_data.py + ramp-test --quick-test + build_pip: + name: Linux pip + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v2 + with: + python-version: 3.8 + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install flake8 nbconvert[test] + if [ -f requirements.txt ]; then pip install -r requirements.txt; fi + pip install git+https://github.com/AlexandreHutton/stroke + - name: Test + run: | + flake8 *.py submissions/*/*.py + export RAMP_TEST_MODE=true # to have download_data.py just download the dummy dataset + python download_data.py + ramp-test --quick-test + ramp-test --submission sample --quick-test + jupyter nbconvert --execute stroke_lesions_starting_kit.ipynb --to html --ExecutePreprocessor.kernel_name=python3