Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 4 additions & 9 deletions cougarvision_utils/detect_img.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,16 +19,11 @@
from animl import inference, split
from sageranger import is_target, attach_image, post_event
from animl.detect import detect_MD_batch, parse_MD
import os

from cougarvision_utils.cropping import draw_bounding_box_on_image
from cougarvision_utils.alert import smtp_setup, send_alert
from cougarvision_visualize.visualize_helper import get_last_file_number
from cougarvision_visualize.visualize_helper import create_folder


with open("config/cameratraps.yml", 'r') as stream:
camera_traps_config = yaml.safe_load(stream)
sys.path.append(camera_traps_config['camera_traps_path'])


def detect(images, config, c_model, classes, d_model):
Expand Down Expand Up @@ -121,10 +116,10 @@ def detect(images, config, c_model, classes, d_model):
img.save(image_bytes, format="JPEG")
img_byte = image_bytes.getvalue()
if visualize_output is True:
folder_path = create_folder(labeled_img)
last_file_number = get_last_file_number(folder_path)
os.makedirs(labeled_img, exist_ok=True)
last_file_number = get_last_file_number(labeled_img)
new_file_number = last_file_number + 1
new_file_name = f"{folder_path}/image_{new_file_number}.jpg"
new_file_name = f"{labeled_img}/image_{new_file_number}.jpg"

with open(new_file_name, "wb") as folder:
folder.write(img_byte)
Expand Down
31 changes: 14 additions & 17 deletions cougarvision_utils/get_images.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,8 @@
last_id.txt as well, but it creates a new one if there is not
one currently present.

'''

import json
import urllib.request
import os.path
import logging
import requests
import numpy as np
from cougarvision_visualize.visualize_helper import get_last_file_number
from cougarvision_visualize.visualize_helper import create_folder


'''
request examples
Here are some different strikeforce api commands for future
reference:

get list of cameras
request <- "cameras"
Expand Down Expand Up @@ -52,6 +40,15 @@
parameters <- ""
'''

import json
import urllib.request
import os.path
import os
import logging
import requests
import numpy as np
from cougarvision_visualize.visualize_helper import get_last_file_number


def request_strikeforce(username, auth_token, base, request, parameters):
'''
Expand Down Expand Up @@ -153,9 +150,9 @@ def fetch_image_api(config):
info['file_thumb_url'], newname])

if visualize_output is True:
file_path = create_folder(unlabeled_img)
newname = file_path + 'image'
new_file_num = get_last_file_number(file_path)
os.makedirs(unlabeled_img, exist_ok=True)
newname = unlabeled_img + 'image'
new_file_num = get_last_file_number(unlabeled_img)
new_file_num = new_file_num + 1
new_file_num = str(new_file_num)
newname += "_" + new_file_num
Expand Down
12 changes: 0 additions & 12 deletions cougarvision_visualize/visualize_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,16 +10,4 @@ def get_last_file_number(folder_path):
if num: # If there are digits in the filename
max_num = max(max_num, int(num[-1])) # Use the last set of digits as the number
return max_num


def create_folder(folder_path):
    """
    Ensure a folder exists at the specified path, creating it if necessary.

    folder_path: The path of the folder to check and potentially create.
    return: the same folder_path, so the call can be used inline.
    """
    # exist_ok=True makes this a single atomic-enough operation and avoids
    # the check-then-create (TOCTOU) race of os.path.exists() followed by
    # os.makedirs(); it also creates intermediate directories as needed.
    os.makedirs(folder_path, exist_ok=True)
    return folder_path

96 changes: 46 additions & 50 deletions fetch_and_alert.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import logging
import yaml
import schedule
import sys


from cougarvision_utils.detect_img import detect
Expand All @@ -35,31 +36,6 @@
from animl import megadetector


# Numpy FutureWarnings from tensorflow import
warnings.filterwarnings('ignore', category=FutureWarning)
# Parse arguments
PARSER = argparse.ArgumentParser(description='Retrieves images from \
email & web scraper & runs detection')
PARSER.add_argument('config', type=str, help='Path to config file')
ARGS = PARSER.parse_args()
CONFIG_FILE = ARGS.config
# Load Configuration Settings from YML file
with open(CONFIG_FILE, 'r', encoding='utf-8') as stream:
CONFIG = yaml.safe_load(stream)
# Set Email Variables for fetching
USERNAME = CONFIG['username']
PASSWORD = CONFIG['password']
TOKEN = CONFIG['token']
AUTH = CONFIG['authorization']
CLASSIFIER = CONFIG['classifier_model']
DETECTOR = CONFIG['detector_model']
DEV_EMAILS = CONFIG['dev_emails']
HOST = 'imap.gmail.com'
RUN_SCHEDULER = CONFIG['run_scheduler']
VISUALIZE_OUTPUT = CONFIG['visualize_output']
LABELS = CONFIG['classes']


def logger():
'''Function to define logging file parameters'''
msg_intro = "%(levelname)s:%(asctime)s:%(module)s:%(funcName)s:"
Expand All @@ -70,46 +46,66 @@ def logger():
force=True)


# Initialize logger now because it will protect against
# handlers that get created when classifer and detector are intialized
logger()


# Set interval for checking in
CHECKIN_INTERVAL = CONFIG['checkin_interval']
print("Loading classifier")
# load models once
CLASSIFIER_MODEL, CLASSES = load_model(CLASSIFIER, LABELS)
print("Finished loading classifier")
print("Begin loading detector")
DETECTOR_MODEL = megadetector.MegaDetector(DETECTOR)
print("Finished loading detector")


def fetch_detect_alert():
def fetch_detect_alert(CONFIG, CLASSIFIER, CLASSES, DETECTOR):
'''Functions for fetching images, detection, and sending alerts'''
# Run the scheduler
print("Running fetch_and_alert")
print("Fetching images")
images = fetch_image_api(CONFIG)
print('Finished fetching images')
print('Starting Detection')

for i in images:
detect(i, CONFIG, CLASSIFIER_MODEL, CLASSES, DETECTOR_MODEL)

detect(i, CONFIG, CLASSIFIER, CLASSES, DETECTOR)
print('Finished Detection')
print("Sleeping since: " + str(dt.now()))


def main():
''''Runs main program and schedules future runs'''
fetch_detect_alert()

'''Runs main program and schedules future runs'''
logger()
# Numpy FutureWarnings from tensorflow import
warnings.filterwarnings('ignore', category=FutureWarning)
# Parse arguments
PARSER = argparse.ArgumentParser(description='Retrieves images from \
email & web scraper & runs detection')
PARSER.add_argument('config', type=str, help='Path to config file')
ARGS = PARSER.parse_args()
CONFIG_FILE = ARGS.config
# Load Configuration Settings from YML file
with open(CONFIG_FILE, 'r', encoding='utf-8') as stream:
CONFIG = yaml.safe_load(stream)
with open("config/cameratraps.yml", 'r') as stream:
camera_traps_config = yaml.safe_load(stream)
sys.path.append(camera_traps_config['camera_traps_path'])
# Set Email Variables for fetching
USERNAME = CONFIG['username']
PASSWORD = CONFIG['password']
TOKEN = CONFIG['token']
AUTH = CONFIG['authorization']
CLASSIFIER_MODEL = CONFIG['classifier_model']
DETECTOR_MODEL = CONFIG['detector_model']
DEV_EMAILS = CONFIG['dev_emails']
HOST = 'imap.gmail.com'
RUN_SCHEDULER = CONFIG['run_scheduler']
VISUALIZE_OUTPUT = CONFIG['visualize_output']
LABELS = CONFIG['classes']
# Set interval for checking in
CHECKIN_INTERVAL = CONFIG['checkin_interval']

# Load models once
print("Loading classifier")
CLASSIFIER, CLASSES = load_model(CLASSIFIER_MODEL, LABELS)
print("Finished loading classifier")

print("Begin loading detector")
DETECTOR = megadetector.MegaDetector(DETECTOR_MODEL)
print("Finished loading detector")

fetch_detect_alert(CONFIG, CLASSIFIER, CLASSES, DETECTOR)
if VISUALIZE_OUTPUT is True:
schedule.every(RUN_SCHEDULER).seconds.do(fetch_detect_alert)
schedule.every(RUN_SCHEDULER).seconds.do(fetch_detect_alert(CONFIG, CLASSIFIER, CLASSES, DETECTOR))
else:
schedule.every(RUN_SCHEDULER).minutes.do(fetch_detect_alert)
schedule.every(RUN_SCHEDULER).minutes.do(fetch_detect_alert(CONFIG, CLASSIFIER, CLASSES, DETECTOR))
schedule.every(CHECKIN_INTERVAL).hours.do(checkin, DEV_EMAILS,
USERNAME, PASSWORD, HOST)
# schedule.every(30).days.do(post_monthly_obs, TOKEN, AUTH)
Expand Down