
Commit af6c8f8

Merge branch 'main' into feature-lifting-expander

2 parents: 91ccc90 + 193fe77

File tree: 53 files changed, +4391 −198 lines


.github/workflows/lint.yml

Lines changed: 3 additions & 1 deletion
@@ -11,4 +11,6 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - uses: chartboost/ruff-action@v1
+      - uses: chartboost/ruff-action@v1
+        with:
+          src: './modules'

.github/workflows/test_codebase.yml

Lines changed: 18 additions & 21 deletions
@@ -27,34 +27,31 @@ jobs:
       fail-fast: false
       matrix:
         os: [ubuntu-latest]
-        python-version: ["3.10", "3.11"]
-        torch-version: [2.0.1]
-        include:
-          - torch-version: 2.0.1
+        python-version: [3.11.3]

     steps:
-      - uses: actions/checkout@v3
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+      - uses: actions/checkout@v4
+      - name: Set up Python ${{matrix.python-version}}
+        uses: actions/setup-python@v5
         with:
-          python-version: ${{ matrix.python-version }}
-          cache: "pip"
-          cache-dependency-path: '**/pyproject.toml'
+          python-version: ${{matrix.python-version}}

-      - name: Install PyTorch ${{ matrix.torch-version }}+cpu
-        run: |
-          pip install --upgrade pip setuptools wheel
-          pip install torch==${{ matrix.torch-version}} --extra-index-url https://download.pytorch.org/whl/cpu
-          pip install torch-scatter -f https://data.pyg.org/whl/torch-${{ matrix.torch-version }}+cpu.html
-          pip install torch-sparse -f https://data.pyg.org/whl/torch-${{ matrix.torch-version }}+cpu.html
-          pip install torch-cluster -f https://data.pyg.org/whl/torch-${{ matrix.torch-version }}+cpu.html
-          pip show pip
-      - name: Install main package
+      - uses: actions/cache@v4
+        with:
+          path: ~/.cache/pip
+          key: ${{matrix.os}}-${{matrix.python-version}}-${{ hashFiles('pyproject.toml') }}
+
+      - name: Install dependencies
         run: |
-          pip install -e .[all]
-      - name: Run tests for codebase [pytest]
+          python -m pip install --upgrade pip
+          pip install pytest
+          if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
+          source env_setup.sh
+
+      - name: Test with pytest
         run: |
           pytest -n 2 --cov --cov-report=xml:coverage.xml test/transforms/feature_liftings test/transforms/liftings
+          pytest test/tutorials/test_tutorials.py
       - name: Upload coverage
         uses: codecov/codecov-action@v3
         with:

.github/workflows/test_tutorials.yml

Lines changed: 0 additions & 59 deletions
This file was deleted.

.pre-commit-config.yaml

Lines changed: 6 additions & 19 deletions
@@ -17,25 +17,12 @@ repos:
       - id: trailing-whitespace
       - id: requirements-txt-fixer

-  - repo: https://github.com/psf/black
-    rev: 23.3.0
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.4.4
     hooks:
-      - id: black-jupyter
+      - id: ruff-format

-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
-    hooks:
-      - id : isort
-        args : ["--profile=black", "--filter-files"]
-
-  #- repo: https://github.com/asottile/blacken-docs
-  #  rev: 1.13.0
-  #  hooks:
-  #  - id: blacken-docs
-  #    additional_dependencies: [black==23.3.0]
-
-  # - repo: https://github.com/pycqa/flake8
-  #   rev: 6.0.0
+  # - repo: https://github.com/numpy/numpydoc
+  #   rev: v1.6.0
   #   hooks:
-  #   - id: flake8
-  #     additional_dependencies: [flake8-docstrings, Flake8-pyproject]
+  #   - id: numpydoc-validation

configs/datasets/QM9.yaml

Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+data_domain: graph
+data_type: QM9
+data_name: QM9
+data_dir: datasets/${data_domain}/${data_type}
+#data_split_dir: ${oc.env:PROJECT_ROOT}/datasets/data_splits/${data_name}
+
+# Dataset parameters
+num_features: 11
+num_classes: 1
+task: regression
+loss_type: mse
+monitor_metric: mae
+task_level: graph
+

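The dataset configs above rely on ${...} interpolation, so data_dir is resolved from data_domain and data_type at load time. A minimal sketch of reading one of them, assuming these YAML files are consumed through OmegaConf/Hydra (which the interpolation syntax suggests, but is not confirmed by this commit):

    # Sketch only: assumes OmegaConf-style loading of the QM9 config added above.
    from omegaconf import OmegaConf

    cfg = OmegaConf.load("configs/datasets/QM9.yaml")
    print(cfg.data_dir)                # interpolations resolve on access -> "datasets/graph/QM9"
    print(cfg.num_features, cfg.task)  # 11 regression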
configs/datasets/gudhi_bunny.yaml

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+data_domain: pointcloud
+data_type: gudhi
+data_name: gudhi_bunny
+data_dir: datasets/${data_domain}/${data_type}
+
+# Dataset parameters
+task: regression
+loss_type: mse
+monitor_metric: mae
configs/datasets/gudhi_daily_activities.yaml

Lines changed: 9 additions & 0 deletions

@@ -0,0 +1,9 @@
+data_domain: pointcloud
+data_type: gudhi
+data_name: gudhi_daily_activities
+data_dir: datasets/${data_domain}/${data_type}
+
+# Dataset parameters
+task: regression
+loss_type: mse
+monitor_metric: mae

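The two configs above use data_type gudhi, matching the point clouds that GUDHI ships through its remote fetchers. A minimal sketch for the bunny dataset; it is an assumption that the repo's loader wraps this fetcher, and the configured data_dir (datasets/pointcloud/gudhi) is handled separately:

    # Sketch only: pulls the Stanford bunny point cloud referenced by gudhi_bunny.yaml.
    from gudhi.datasets import remote

    bunny = remote.fetch_bunny()  # numpy array of 3D points
    print(bunny.shape)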
configs/datasets/gudhi_sphere.yaml

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+data_domain: pointcloud
+data_type: toy_dataset
+data_name: gudhi_sphere
+data_dir: datasets/${data_domain}/${data_type}
+
+# Dataset parameters
+ambient_dim: 3
+sample: random # can also be 'grid'
+n_samples: 1000
+task: regression
+loss_type: mse
+monitor_metric: mae

configs/datasets/gudhi_spiral_2d.yaml

Lines changed: 9 additions & 0 deletions
@@ -0,0 +1,9 @@
+data_domain: pointcloud
+data_type: toy_dataset
+data_name: gudhi_spiral_2d
+data_dir: datasets/${data_domain}/${data_type}
+
+# Dataset parameters
+task: regression
+loss_type: mse
+monitor_metric: mae

configs/datasets/gudhi_torus.yaml

Lines changed: 12 additions & 0 deletions
@@ -0,0 +1,12 @@
+data_domain: pointcloud
+data_type: toy_dataset
+data_name: gudhi_torus
+data_dir: datasets/${data_domain}/${data_type}
+
+# Dataset parameters
+dim: 3 # The dimension of the *torus* - the ambient space has dimension 2 * dim
+sample: random # can also be 'grid'
+n_samples: 1000
+task: regression
+loss_type: mse
+monitor_metric: mae

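The gudhi_sphere and gudhi_torus configs mirror the parameters of GUDHI's point-cloud generators (ambient_dim, dim, sample, n_samples), so the point clouds they describe can be sampled directly. A minimal sketch, assuming the repo's pointcloud loaders forward these values to gudhi.datasets.generators.points:

    # Sketch only: samples the point clouds described by gudhi_sphere.yaml and gudhi_torus.yaml.
    from gudhi.datasets.generators import points

    sphere = points.sphere(n_samples=1000, ambient_dim=3, sample="random")
    torus = points.torus(n_samples=1000, dim=3, sample="random")

    print(sphere.shape)  # (1000, 3): points on the 2-sphere in R^3
    print(torus.shape)   # (1000, 6): the ambient space has dimension 2 * dim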