diff --git a/.circleci/config.yml b/.circleci/config.yml index 43a493f..7ba7917 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,9 +1,12 @@ --- -version: 2 +version: 2.1 jobs: build: docker: - - image: circleci/python:2.7.17-stretch + - image: circleci/python:3.7.6 + auth: + username: ${DOCKER_USER} + password: ${DOCKER_PASS} steps: - checkout - setup_remote_docker diff --git a/shavar.ini b/shavar.ini index d173a4b..e8b11db 100644 --- a/shavar.ini +++ b/shavar.ini @@ -58,3 +58,4 @@ default_proto_ver = 2.0 lists_served = dir://shavar/tests/lists_served lists_root = tests sentry_dsn = ${SENTRY_DSN} +refresh_lists_delay=21600 diff --git a/shavar.testing.ini b/shavar.testing.ini index 5425ac5..f58ca83 100644 --- a/shavar.testing.ini +++ b/shavar.testing.ini @@ -92,7 +92,7 @@ override = True lists_served = dir://shavar/tests/lists_served # The amount of time to wait before refreshing the list_names_served from the # above lists_served source. -refresh_lists_delay=600 +refresh_lists_delay=21600 # The default protocol version to speak. As yet, we only speak version # 2 of the protocol even though it has been superceded by Google. 
# Default value: 2 diff --git a/shavar/sources.py b/shavar/sources.py index 07ecda8..2a95340 100644 --- a/shavar/sources.py +++ b/shavar/sources.py @@ -7,7 +7,7 @@ from urllib.parse import urlparse from boto.exception import S3ResponseError -from boto.s3.connection import S3Connection +from boto import connect_s3 from shavar.exceptions import NoDataError, ParseError from shavar.parse import parse_dir_source, parse_file_source @@ -147,7 +147,7 @@ def __init__(self, source_url, refresh_interval): def _get_key(self): try: - conn = S3Connection() + conn = connect_s3() bucket = conn.get_bucket(self.url.netloc) except S3ResponseError as e: raise NoDataError("Could not find bucket \"%s\": %s" @@ -188,7 +188,7 @@ def __init__(self, source_url, refresh_interval): def load(self): # for the closures to minimize the number of connections to S3 - conn = connect_s3() try: bucket = conn.get_bucket(self.url.netloc)