1 change: 1 addition & 0 deletions .cfignore
1 change: 0 additions & 1 deletion .gitignore
@@ -9,7 +9,6 @@ db/*.db
.*.sw*
.env
.*.env
.cfignore
.vagrant
.idea/
*profile*
51 changes: 51 additions & 0 deletions create-space.sh
@@ -0,0 +1,51 @@
#!/bin/bash
set -e

if [ ! -n "$1" ]; then
echo "Usage: From inside open-data-maker directory..."
echo " create-space.sh spacename"
exit 1
fi
echo "Creating space: $1"
SPACE=$1

# Creating the indexing space
cf create-space ${SPACE} -o ed

echo "# When creating the space, the user which creates it is a SpaceManager"
echo "# Add developers using: cf set-space-role USERNAME ed ${SPACE} SpaceDeveloper"

# Target the space
cf target -o ed -s ${SPACE}

# Create an S3 service instance (data-files) used by the snapshot tools
cf create-service s3 basic data-files


#cf create-user-provided-service bservice -p '{"BSERVICE_ACCESS_KEY":"YOUR_S3_ACCESS_KEY", "BSERVICE_SECRET_KEY": "YOUR_S3_SECRET_KEY", "BSERVICE_BUCKET": "YOUR_S3_BUCKET"}'

# Create backup service
echo "# To create a backup service in this space run:"
echo "# cf create-service s3 basic backup"

# Create the ElasticSearch service
cf create-service elasticsearch-swarm-1.7.5 3x eservice

echo "Creating the API server by pushing the ccapi-${SPACE} app:"
cf push -f manifest-${SPACE}.yml
echo "By default the app will use the data-files bucket, leaving DATA_PATH env blank"

echo "# For data archive / downloads, these are served via a S3 proxy"
echo "# The /downloads path is redirected via CloudFront to"
echo "# ed-public-download.apps.cloud.gov which is in the production space"
echo "# To create additional S3 proxies: https://github.com/18F/cg-s3-proxy"

echo "TODO: how to put files in the bucket"

echo "# now you need to index"
echo "cf-ssh -f manifest-${SPACE}.yml --verbose"
echo "# wait several minutes for this to connect"
echo "echo $DATA_PATH"
echo "# it should be blank, meaning you will get default cities data"
echo "rake import"
echo "# when this is done, go to https://ccapi-${SPACE}.18f.gov and explore"
7 changes: 5 additions & 2 deletions lib/data_magic.rb
@@ -41,8 +41,11 @@ class InvalidDictionary < StandardError

def self.s3
if @s3.nil?
if ENV['VCAP_APPLICATION']
s3cred = ::CF::App::Credentials.find_by_service_name(ENV['s3_bucket_service'] || 'bservice')
if ENV['VCAP_APPLICATION'] # in Cloud Foundry
cfcred = ::CF::App::Credentials.find_by_service_name(ENV['s3_bucket_service'] || 'data-files')
s3cred = {'access_key'=> cfcred['access_key_id'], 'secret_key' => cfcred['secret_access_key']}
bucket_name = cfcred['bucket']
# TODO: default DATA_PATH to s3://{bucket_name}
else
s3cred = {'access_key'=> ENV['s3_access_key'], 'secret_key' => ENV['s3_secret_key']}
end
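The TODO in this hunk suggests defaulting DATA_PATH to the bound bucket. A minimal sketch of what that could look like inside the Cloud Foundry branch above (the env-var check and URL format are assumptions, not code from this PR):

# Illustrative sketch only -- not part of this diff.
# Right after bucket_name is read from cfcred, DATA_PATH could fall back
# to the bound S3 bucket when it has not been set explicitly:
if ENV['DATA_PATH'].nil? || ENV['DATA_PATH'].empty?
  ENV['DATA_PATH'] = "s3://#{bucket_name}"  # bucket_name comes from cfcred['bucket']
end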
16 changes: 16 additions & 0 deletions manifest-testing.yml
@@ -0,0 +1,16 @@
---
applications:
- name: ccapi
host: ccapi-testing
domain: 18f.gov
command: bundle exec puma -C ./config/puma.rb
instances: 1
memory: 1G
services:
- data-files
- eservice
env:
MAX_THREADS: 5
WEB_CONCURRENCY: 1
INDEX_APP: enable
NPROCS: 1
35 changes: 22 additions & 13 deletions script/s3config.rb
@@ -4,6 +4,7 @@
# @s3 = ::Aws::S3::Client.new

require 'dotenv'
require 'json'

branch = `echo $(git symbolic-ref --short HEAD)`.chomp

@@ -12,26 +13,34 @@
puts "using APP_ENV from environment #{APP_ENV}"
else
case branch
when "master"
APP_ENV = "production"
when "staging"
APP_ENV = "staging"
else
puts "not on master or staging branch lets use dev"
APP_ENV = "dev"
when "master"
APP_ENV = "production"
when "staging"
APP_ENV = "staging"
else
puts "not on master or staging branch lets use dev"
APP_ENV = "dev" # FIXME: shouldn't the APP_ENV be testing?
end
end

Dotenv.load(
File.expand_path("../../.#{APP_ENV}.env", __FILE__),
File.expand_path("../../.env", __FILE__))
cf_credentials = `cf target -o ed -s #{APP_ENV} && echo "$(cf env ccapi)" | tail -n +5 | sed -n -e :a -e '1,10!{P;N;D;};N;ba'`
cf_json_str = cf_credentials.gsub("\n", '').gsub(/^[^{]+{/, '{').gsub(/{\s+"VCAP_APPLICATION".+$/, '')

cf_json = JSON.parse(cf_json_str)
cf_data_files = cf_json['VCAP_SERVICES']['s3'].detect {|j| j['name'] == 'data-files'}

fail "Unable to find data-files configuration" if cf_data_files.nil?

ENV['CF_CREDENTIALS'] = cf_json_str
ENV['AWS_ACCESS_KEY_ID'] = cf_data_files['credentials']['access_key_id']
ENV['AWS_SECRET_ACCESS_KEY'] = cf_data_files['credentials']['secret_access_key']
ENV['BUCKET_NAME'] = cf_data_files['credentials']['bucket']

require 'aws-sdk'
puts "app env: #{APP_ENV}"
puts "bucket name: #{ENV['s3_bucket']}"

puts "bucket name: #{ENV['BUCKET_NAME']}"

s3cred = {'access_key'=> ENV['s3_access_key'], 'secret_key' => ENV['s3_secret_key']}
s3cred = {'access_key'=> ENV['AWS_ACCESS_KEY_ID'], 'secret_key' => ENV['AWS_SECRET_ACCESS_KEY']}

::Aws.config[:credentials] = ::Aws::Credentials.new(s3cred['access_key'], s3cred['secret_key'])
::Aws.config[:region] = 'us-east-1'
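For reference, the parsing above relies on only a few fields of the cf env output. The VCAP_SERVICES block it expects is roughly shaped as follows (all values are placeholders; only the key names are taken from how the script reads them):

# Rough shape of the JSON this script parses (placeholder values only):
expected = {
  'VCAP_SERVICES' => {
    's3' => [
      {
        'name' => 'data-files',
        'credentials' => {
          'access_key_id'     => 'AKIA................',   # -> ENV['AWS_ACCESS_KEY_ID']
          'secret_access_key' => '....................',   # -> ENV['AWS_SECRET_ACCESS_KEY']
          'bucket'            => 'some-bucket-name'        # -> ENV['BUCKET_NAME']
        }
      }
    ]
  }
}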
2 changes: 1 addition & 1 deletion script/s3pull
@@ -4,7 +4,7 @@ require_relative 's3config.rb'

@s3 = ::Aws::S3::Client.new

bucket = ENV['s3_bucket']
bucket = ENV['BUCKET_NAME']

dirname = 'real-data'
unless File.directory?(dirname)
2 changes: 1 addition & 1 deletion script/s3push
@@ -5,7 +5,7 @@ require_relative 's3config.rb'
@s3 = ::Aws::S3::Client.new

dirname = 'real-data'
bucket_name = ENV['s3_bucket']
bucket_name = ENV['BUCKET_NAME']
datayamlpath = File.expand_path("../../#{dirname}/#{bucket_name}.yaml", __FILE__)

puts "copying #{datayamlpath}"