
Merge pull request #71 from pmh-ds/master

Added support for multiple AWS profiles
Branch: main
Isaac Slavitt committed 8 years ago via GitHub
Parent commit: dc14659408
Changed files:

1. .gitignore (4 lines changed)
2. cookiecutter.json (1 line changed)
3. docs/docs/index.md (13 lines changed)
4. tests/test_creation.py (3 lines changed)
5. {{ cookiecutter.repo_name }}/Makefile (9 lines changed)

.gitignore (4 lines changed)

@@ -2,3 +2,7 @@ docs/site/
 # OSX Junk
 .DS_Store
+# test cache
+.cache/*
+tests/__pycache__/*

cookiecutter.json (1 line changed)

@@ -5,5 +5,6 @@
     "description": "A short description of the project.",
     "open_source_license": ["MIT", "BSD", "Not open source"],
     "s3_bucket": "[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')",
+    "aws_profile": "default",
     "python_interpreter": ["python", "python3"]
 }
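With `aws_profile` added to `cookiecutter.json`, project creation now prompts for a profile name and falls back to `default`. A rough sketch of the prompt flow, assuming the upstream drivendata/cookiecutter-data-science template URL; `my-project-data` and `another_project` are placeholder answers, not values from this diff:

```
$ cookiecutter https://github.com/drivendata/cookiecutter-data-science
...
s3_bucket [[OPTIONAL] your-bucket-for-syncing-data (do not include 's3://')]: my-project-data
aws_profile [default]: another_project
...
```

Pressing Enter at the `aws_profile` prompt keeps `default`, which leaves the generated Makefile behaviour unchanged.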

docs/docs/index.md (13 lines changed)

@@ -215,6 +215,19 @@ database_url = os.environ.get("DATABASE_URL")
 other_variable = os.environ.get("OTHER_VARIABLE")
 ```
+#### AWS CLI configuration
+When using Amazon S3 to store data, a simple method of managing AWS access is to set your access keys to environment variables. However, when managing multiple sets of keys on a single machine (e.g. when working on multiple projects), it is best to use a [credentials file](https://docs.aws.amazon.com/cli/latest/userguide/cli-config-files.html), typically located in `~/.aws/credentials`. A typical file might look like:
+```
+[default]
+aws_access_key_id=myaccesskey
+aws_secret_access_key=mysecretkey
+[another_project]
+aws_access_key_id=myprojectaccesskey
+aws_secret_access_key=myprojectsecretkey
+```
+You can add the profile name when initialising a project; assuming no applicable environment variables are set, the profile credentials will be used by default.
 ### Be conservative in changing the default folder structure
 To keep this structure broadly applicable for many different kinds of projects, we think the best approach is to be liberal in changing the folders around for _your_ project, but be conservative in changing the default structure for _all_ projects.
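The "assuming no applicable environment variables are set" caveat reflects the AWS CLI's credential precedence: environment variables are checked before the credentials file. A minimal shell sketch of how that interacts with the named profile; `another_project` is just the example profile from the docs above:

```
# Exported keys win over ~/.aws/credentials, so clear them
# if you want the named profile to take effect:
unset AWS_ACCESS_KEY_ID AWS_SECRET_ACCESS_KEY

# AWS_PROFILE selects a named profile for every aws invocation
# without passing --profile each time:
export AWS_PROFILE=another_project
aws s3 ls
```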

tests/test_creation.py (3 lines changed)

@@ -80,8 +80,7 @@ def test_folders(default_baked_project):
     ]
     ignored_dirs = [
-        default_baked_project,
-        os.path.join(default_baked_project, '__pycache__')
+        default_baked_project
     ]
     abs_expected_dirs = [os.path.join(default_baked_project, d) for

{{ cookiecutter.repo_name }}/Makefile (9 lines changed)

@@ -6,6 +6,7 @@
 PROJECT_DIR := $(shell dirname $(realpath $(lastword $(MAKEFILE_LIST))))
 BUCKET = {{ cookiecutter.s3_bucket }}
+PROFILE = {{ cookiecutter.aws_profile }}
 PROJECT_NAME = {{ cookiecutter.repo_name }}
 PYTHON_INTERPRETER = {{ cookiecutter.python_interpreter }}

@@ -37,11 +38,19 @@ lint:
 ## Upload Data to S3
 sync_data_to_s3:
+ifeq (default,$(PROFILE))
 	aws s3 sync data/ s3://$(BUCKET)/data/
+else
+	aws s3 sync data/ s3://$(BUCKET)/data/ --profile $(PROFILE)
+endif
 ## Download Data from S3
 sync_data_from_s3:
+ifeq (default,$(PROFILE))
 	aws s3 sync s3://$(BUCKET)/data/ data/
+else
+	aws s3 sync s3://$(BUCKET)/data/ data/ --profile $(PROFILE)
+endif
 ## Set up python interpreter environment
 create_environment:
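The `ifeq` guard keeps the default case identical to the old recipe; only a non-default profile appends `--profile`. A rough sketch of what `make` would echo in each case, assuming a placeholder bucket name of `my-project-data`:

```
# aws_profile left as "default" - behaviour unchanged
$ make sync_data_to_s3
aws s3 sync data/ s3://my-project-data/data/

# aws_profile set to "another_project" at project creation
$ make sync_data_to_s3
aws s3 sync data/ s3://my-project-data/data/ --profile another_project
```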
