Skip to content

Commit

Permalink
aws example
Browse files Browse the repository at this point in the history
  • Loading branch information
mzouink committed Apr 29, 2024
1 parent 95d3f79 commit 1b414a7
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 69 deletions.
15 changes: 4 additions & 11 deletions dacapo/store/create_store.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from .local_array_store import LocalArrayStore
from .local_weights_store import LocalWeightsStore
from .s3_weights_store import S3WeightsStore
from .mongo_config_store import MongoConfigStore
from .file_config_store import FileConfigStore
from .mongo_stats_store import MongoStatsStore
Expand Down Expand Up @@ -32,7 +31,7 @@ def create_config_store():
db_name = options.mongo_db_name
return MongoConfigStore(db_host, db_name)
elif options.type == "files":
store_path = Path(options.runs_base_dir).expanduser()
store_path = Path(options.runs_base_dir)
return FileConfigStore(store_path / "configs")
else:
raise ValueError(f"Unknown store type {options.type}")
Expand Down Expand Up @@ -63,7 +62,7 @@ def create_stats_store():
db_name = options.mongo_db_name
return MongoStatsStore(db_host, db_name)
elif options.type == "files":
store_path = Path(options.runs_base_dir).expanduser()
store_path = Path(options.runs_base_dir)
return FileStatsStore(store_path / "stats")
else:
raise ValueError(f"Unknown store type {options.type}")
Expand All @@ -86,14 +85,8 @@ def create_weights_store():

options = Options.instance()

if options.store == "s3":
s3_bucket = options.s3_bucket
return S3WeightsStore(s3_bucket)
elif options.store == "local":
base_dir = Path(options.runs_base_dir).expanduser()
return LocalWeightsStore(base_dir)
else:
raise ValueError(f"Unknown weights store type {options.type}")
base_dir = Path(options.runs_base_dir)
return LocalWeightsStore(base_dir)


def create_array_store():
Expand Down
54 changes: 0 additions & 54 deletions dacapo/store/s3_weights_store.py

This file was deleted.

14 changes: 14 additions & 0 deletions examples/aws/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
You can work locally using S3 data by setting the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables. You can also set the `AWS_REGION` environment variable to specify the region to use. If you are using a profile, you can set the `AWS_PROFILE` environment variable to specify the profile to use.

```bash
aws configure
```

In order to store checkpoints and experiment data in S3, you need to modify `dacapo.yaml` to include the following:

```yaml
runs_base_dir: "s3://dacapotest"
```
For configs and stats, you can store them locally or on S3 by setting `type: files`, or in MongoDB by setting `type: mongo`, in the `dacapo.yaml` file.

30 changes: 30 additions & 0 deletions examples/aws/aws_store_check.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# %%
"""Sanity check that DaCapo's config store works with an AWS/S3 setup.

Run cell-by-cell (e.g. in Jupyter or VS Code interactive mode):
1. build a config store from the active ``dacapo.yaml``,
2. inspect the resolved options to confirm the S3 paths were picked up,
3. store a sample task config to verify that writes succeed.
"""
import dacapo

# Build the config store as configured in dacapo.yaml
# (``type: files`` with an s3:// runs_base_dir, or ``type: mongo``).
config_store = dacapo.store.create_store.create_config_store()

# %%
from dacapo import Options

# Resolved runtime options (runs_base_dir, store type, ...).
options = Options.instance()

# %%
# Display the options — confirm runs_base_dir points at the S3 bucket.
options
# %%
from dacapo.experiments.tasks import DistanceTaskConfig

# Sample task config used purely to exercise a store write.
task_config = DistanceTaskConfig(
    name="cosem_distance_task_4nm",
    channels=["mito"],
    clip_distance=40.0,
    tol_distance=40.0,
    scale_factor=80.0,
)

# %%

# Write the config; succeeds only if the store (local or S3) is reachable.
config_store.store_task_config(task_config)

# %%
5 changes: 1 addition & 4 deletions examples/aws/dacapo.yaml
Original file line number Diff line number Diff line change
@@ -1,6 +1,3 @@

store: "s3"
s3_bucket: "s3//dacapo-tests"

runs_base_dir: "s3://dacapotest"
type: "files"

0 comments on commit 1b414a7

Please sign in to comment.