Bulk commit - start using Git again

fix-v0.2
Tom Wilson 6 years ago
parent 3da06fe888
commit 999e991298

@ -0,0 +1,9 @@
[shepherd]
plugin_dir = "~/shepherd/"
plugins = ["picam"]
root_dir = "~/shepherd/"
conf_edit_path = "~/shepherd.toml"
[picam]
[[picam.trigger]]
hour = "00-23"
minute = "*"

@ -0,0 +1,80 @@
#!/usr/bin/env python3
"""Test harness: grab frames from a V4L2 camera via a GStreamer pipeline and
composite a semi-transparent overlay bar (logo, title, timestamp) with PIL.

Fixes: removed a leftover `breakpoint()` that halted the capture loop, and
added a guard so a failed read no longer crashes cvtColor with a None frame.
"""
import cv2
from PIL import Image, ImageDraw, ImageFont

print(cv2.__version__)

# GStreamer pipeline: keep only the newest buffer so reads are low-latency.
gst_str = ('v4l2src device=/dev/video0 ! '
           'videoconvert ! appsink drop=true max-buffers=1 sync=false')
print(gst_str)

logo_im = Image.open('smallshepherd.png')
overlayfont = "DejaVuSansMono.ttf"
try:
    fnt = ImageFont.truetype(overlayfont, 50)
except IOError:
    fnt = ImageFont.load_default()

# Caches keyed by computed font size, so fonts/logos are rescaled only once
# per output resolution.
loaded_fonts = {}
loaded_logos = {}

vidcap = cv2.VideoCapture(gst_str, cv2.CAP_GSTREAMER)
while True:
    vidcap.grab()
    read_flag, frame = vidcap.read()
    print(read_flag)
    if not read_flag:
        # No frame available; skip instead of passing None to cvtColor.
        continue
    # Convert BGR (OpenCV) to RGB and over to PIL, mostly so we can use our
    # own font.
    img = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    im_pil = Image.fromarray(img)

    # Scale overlay elements relative to the frame height.
    font_size = int(im_pil.height / 40)
    margin_size = int(font_size / 5)
    if font_size not in loaded_fonts:
        loaded_fonts[font_size] = ImageFont.truetype(overlayfont, int(font_size * 0.9))
    thisfont = loaded_fonts[font_size]
    if font_size not in loaded_logos:
        newsize = (int(logo_im.width * (font_size / logo_im.height)), font_size)
        loaded_logos[font_size] = logo_im.resize(newsize, Image.BILINEAR)
    thislogo = loaded_logos[font_size]

    # Overlay bar: centred logo, left-aligned title, right-aligned timestamp,
    # pasted semi-transparent along the bottom edge of the frame.
    overlay = Image.new('RGBA', (im_pil.width, font_size + (2 * margin_size)), (0, 0, 0))
    overlay.paste(thislogo, (int((overlay.width - thislogo.width) / 2), margin_size))
    draw = ImageDraw.Draw(overlay)
    draw.text((margin_size * 2, margin_size), "SARDIcam-1", font=thisfont,
              fill=(255, 255, 255, 255))
    datetext = "2019-07-24 20:22:31"
    datewidth, _ = draw.textsize(datetext, thisfont)
    draw.text((overlay.width - (margin_size * 2) - datewidth, margin_size), datetext,
              font=thisfont, fill=(255, 255, 255, 255))
    overlay.putalpha(128)
    im_pil.paste(overlay, (0, im_pil.height - overlay.height), overlay)
    im_pil.save("test.jpg", "JPEG", quality=90)

@ -1,6 +1,10 @@
import re import re
import toml import toml
from shepherd.freezedry import freezedryable, rehydrate
class InvalidConfigError(Exception): class InvalidConfigError(Exception):
pass pass
@ -31,28 +35,37 @@ class InvalidConfigError(Exception):
# config def required interface: # config def required interface:
# Validate values. # Validate values.
# The Table and Array terms used here are directly from the TOML convention, but they essentially
# map directly to Dictionaries (Tables), and Lists (Arrays)
class _ConfigDefinition(): class _ConfigDefinition():
def __init__(self, default=None, optional=False): def __init__(self, default=None, optional=False, helptext=""):
self.default = default self.default = default
self.optional = optional self.optional = optional
self.helptext = helptext
def validate(self, value): # pylint: disable=W0613 def validate(self, value): # pylint: disable=W0613
raise TypeError("_ConfigDefinition.validate() is an abstract method") raise TypeError("_ConfigDefinition.validate() is an abstract method")
@freezedryable
class BoolDef(_ConfigDefinition): class BoolDef(_ConfigDefinition):
def __init__(self, default=None, optional=False): # pylint: disable=W0235 def __init__(self, default=None, optional=False, helptext=""): # pylint: disable=W0235
super().__init__(default, optional) super().__init__(default, optional, helptext)
def validate(self, value): def validate(self, value):
if not isinstance(value, bool): if not isinstance(value, bool):
raise InvalidConfigError("Config value must be a boolean") raise InvalidConfigError("Config value must be a boolean")
@freezedryable
class IntDef(_ConfigDefinition): class IntDef(_ConfigDefinition):
def __init__(self, default=None, minval=None, maxval=None, def __init__(self, default=None, minval=None, maxval=None,
optional=False): optional=False, helptext=""):
super().__init__(default, optional) super().__init__(default, optional, helptext)
self.minval = minval self.minval = minval
self.maxval = maxval self.maxval = maxval
@ -66,11 +79,11 @@ class IntDef(_ConfigDefinition):
raise InvalidConfigError("Config value must be <= " + raise InvalidConfigError("Config value must be <= " +
str(self.maxval)) str(self.maxval))
@freezedryable
class StringDef(_ConfigDefinition): class StringDef(_ConfigDefinition):
def __init__(self, default=None, minlength=None, maxlength=None, def __init__(self, default=None, minlength=None, maxlength=None,
optional=False): optional=False, helptext=""):
super().__init__(default, optional) super().__init__(default, optional, helptext)
self.minlength = minlength self.minlength = minlength
self.maxlength = maxlength self.maxlength = maxlength
@ -84,10 +97,10 @@ class StringDef(_ConfigDefinition):
raise InvalidConfigError("Config string length must be <= " + raise InvalidConfigError("Config string length must be <= " +
str(self.maxlength)) str(self.maxlength))
@freezedryable
class TableDef(_ConfigDefinition): class TableDef(_ConfigDefinition):
def __init__(self, default=None, optional=False): def __init__(self, default=None, optional=False, helptext=""):
super().__init__(default, optional) super().__init__(default, optional, helptext)
self.def_table = {} self.def_table = {}
def add_def(self, name, newdef): def add_def(self, name, newdef):
@ -99,7 +112,7 @@ class TableDef(_ConfigDefinition):
self.def_table[name] = newdef self.def_table[name] = newdef
return newdef return newdef
def validate(self, value_table): # pylint: disable=W0221 def validate(self, value_table): # pylint: disable=W0221
def_set = set(self.def_table.keys()) def_set = set(self.def_table.keys())
value_set = set(value_table.keys()) value_set = set(value_table.keys())
@ -133,23 +146,23 @@ class _ArrayDefMixin():
e.args = ("Array index: " + str(index),) + e.args e.args = ("Array index: " + str(index),) + e.args
raise raise
@freezedryable
class BoolArrayDef(_ArrayDefMixin, BoolDef): class BoolArrayDef(_ArrayDefMixin, BoolDef):
pass pass
@freezedryable
class IntArrayDef(_ArrayDefMixin, IntDef): class IntArrayDef(_ArrayDefMixin, IntDef):
pass pass
@freezedryable
class StringArrayDef(_ArrayDefMixin, StringDef): class StringArrayDef(_ArrayDefMixin, StringDef):
pass pass
@freezedryable
class TableArrayDef(_ArrayDefMixin, TableDef): class TableArrayDef(_ArrayDefMixin, TableDef):
pass pass
@freezedryable
class ConfDefinition(TableDef): class ConfDefinition(TableDef):
pass pass
@ -172,7 +185,8 @@ class ConfigManager():
raise TypeError("Supplied config definition must be an instance " raise TypeError("Supplied config definition must be an instance "
"of ConfDefinition") "of ConfDefinition")
if table_name not in self.root_config: if table_name not in self.root_config:
raise InvalidConfigError("Config must contain table: " + table_name) raise InvalidConfigError(
"Config must contain table: " + table_name)
try: try:
conf_def.validate(self.root_config[table_name]) conf_def.validate(self.root_config[table_name])
except InvalidConfigError as e: except InvalidConfigError as e:
@ -186,10 +200,12 @@ class ConfigManager():
config_values[name] = self.get_config(name, conf_def) config_values[name] = self.get_config(name, conf_def)
return config_values return config_values
def get_module_configs(self, modules): def get_plugin_configs(self, plugin_classes):
config_values = {} config_values = {}
for name, module in modules.items(): for plugin_name, plugin_class in plugin_classes.items():
config_values[name] = self.get_config(name, module.conf_def) conf_def = ConfDefinition()
plugin_class.define_config(conf_def)
config_values[plugin_name] = self.get_config(plugin_name, conf_def)
return config_values return config_values
def dump_toml(self): def dump_toml(self):
@ -221,3 +237,4 @@ def update_toml_message(filepath, message):
def gen_comment(string): def gen_comment(string):
return '\n# shepherd_message: ' + '\n# '.join(string.replace('#', '').splitlines()) + '\n' return '\n# shepherd_message: ' + '\n# '.join(string.replace('#', '').splitlines()) + '\n'

@ -0,0 +1,62 @@
import os
import uuid
import subprocess
import requests
import threading
import json
# Check for shepherd.new file in edit conf dir. If there,
# or if no shepherd.id file can be found, generate a new one.
# For now, also attempt to delete /var/lib/zerotier-one/identity.public and identity.secret
# Once generated, if it was due to shepherd.new file, delete it.
#Start new thread, and push ID and core config to api.shepherd.distreon.net/client/update
client_id = None
def _update_job(core_config):
    """Push this client's ID and core config to the shepherd API.

    Runs in a background thread; network failures are deliberately swallowed
    so a missing connection never takes down the client.
    """
    payload = {"client_id": client_id, "core_config": core_config}
    json_string = json.dumps(payload)
    try:
        # Bound the request so the background thread cannot hang forever.
        requests.post('http://api.shepherd.distreon.net/client/update',
                      data=json_string, timeout=30)
    except requests.exceptions.RequestException:
        # Best-effort update: ignore connection/timeout errors.
        pass
def generate_new_zerotier_id():
    """Delete the Zerotier identity files and restart the service so it
    regenerates a fresh node ID."""
    print("Removing old Zerotier id files")
    try:
        os.remove("/var/lib/zerotier-one/identity.public")
        os.remove("/var/lib/zerotier-one/identity.secret")
    except OSError:
        # Files may be absent (fresh install) or unremovable; regeneration is
        # best-effort either way. The previous bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit, which must propagate.
        pass
    print("Restarting Zerotier systemd service to regenerate ID")
    subprocess.run(["systemctl", "restart", "zerotier-one.service"])
def generate_new_id(root_dir):
    """Create a fresh client UUID, persist it to shepherd.id in *root_dir*
    and force a matching new Zerotier identity."""
    global client_id
    client_id = str(uuid.uuid1())
    id_path = os.path.join(root_dir, "shepherd.id")
    with open(id_path, 'w+') as id_file:
        id_file.write(client_id)
    generate_new_zerotier_id()
def init_control(core_config):
    """Establish this client's persistent ID and start the background
    config-push thread.

    A ``shepherd.new`` marker file next to the editable config, or a missing
    ``shepherd.id`` in the root dir, forces generation of a new client ID
    (and a new Zerotier identity with it).
    """
    global client_id
    root_dir = os.path.expanduser(core_config["root_dir"])
    editconf_dir = os.path.dirname(os.path.expanduser(core_config["conf_edit_path"]))
    if os.path.isfile(os.path.join(editconf_dir, "shepherd.new")):
        # Explicit request for a new identity; consume the marker file.
        generate_new_id(root_dir)
        os.remove(os.path.join(editconf_dir, "shepherd.new"))
    elif not os.path.isfile(os.path.join(root_dir, "shepherd.id")):
        # No stored ID yet - first run on this host.
        generate_new_id(root_dir)
    else:
        # Reuse the previously persisted ID.
        with open(os.path.join(root_dir, "shepherd.id"), 'r') as id_file:
            client_id = id_file.readline().strip()
    print(F"Client ID is: {client_id}")
    control_thread = threading.Thread(target=_update_job, args=(core_config,))
    control_thread.start()

@ -5,19 +5,15 @@
# APScheduler # APScheduler
import sys
import argparse import argparse
import os import os
from datetime import datetime
import toml import toml
import shepherd.scheduler
import shepherd.config import shepherd.config
import shepherd.module import shepherd.plugin
import shepherd.control
from apscheduler.schedulers.blocking import BlockingScheduler
from datetime import datetime
from types import SimpleNamespace
# Future implementations of checking config differences should be done on # Future implementations of checking config differences should be done on
@ -32,104 +28,107 @@ from types import SimpleNamespace
# Fix this by saving the working config to /boot when new config applied # Fix this by saving the working config to /boot when new config applied
# remotely. # remotely.
def load_config(config_path): def define_core_config(confdef):
confdef.add_def("id", shepherd.config.StringDef())
confdef.add_def("plugins", shepherd.config.StringArrayDef())
confdef.add_def("plugin_dir", shepherd.config.StringDef())
confdef.add_def("root_dir", shepherd.config.StringDef())
confdef.add_def("conf_edit_path", shepherd.config.StringDef())
def load_config(config_path,load_editconf):
# Load config from default location
confman = shepherd.config.ConfigManager() confman = shepherd.config.ConfigManager()
confman.load(os.path.expanduser(config_path)) confman.load(os.path.expanduser(config_path))
core_conf = confman.get_config("shepherd", core_confdef()) # Create core confdef and populate it
core_confdef = shepherd.config.ConfDefinition()
# Check for an edit_conf file, and try to load it define_core_config(core_confdef)
try: # attempt to retrive core config and validate it
edit_confman = shepherd.config.ConfigManager() core_conf = confman.get_config("shepherd", core_confdef)
edit_confman.load(os.path.expanduser(core_conf["conf_edit_path"])) edit_confman = None
core_edit_conf = edit_confman.get_config("shepherd", core_confdef()) conf_edit_message = None
mod_classes = shepherd.module.find_modules(core_edit_conf["modules"]) if load_editconf:
mod_configs = edit_confman.get_module_configs(mod_classes) # Check for an edit_conf file, and try to load it and plugin configs
try:
except FileNotFoundError: edit_confman = shepherd.config.ConfigManager()
conf_edit_message = None edit_confman.load(os.path.expanduser(core_conf["conf_edit_path"]))
except shepherd.config.InvalidConfigError as e: core_edit_conf = edit_confman.get_config("shepherd", core_confdef)
conf_edit_message = "Invalid config.\n " + str(e.args)
except toml.TomlDecodeError as e: plugin_classes = shepherd.plugin.find_plugins(
conf_edit_message = "TOML syntax error.\n" + str(e) core_edit_conf["plugins"])
except Exception: plugin_configs = edit_confman.get_plugin_configs(plugin_classes)
conf_edit_message = "Error processing new config"
else: except FileNotFoundError:
conf_edit_message = ("Successfully applied this config at:" + conf_edit_message = None
str(datetime.now())) except shepherd.config.InvalidConfigError as e:
confman = edit_confman conf_edit_message = "Invalid config.\n " + str(e.args)
core_conf = core_edit_conf except toml.TomlDecodeError as e:
conf_edit_message = "TOML syntax error.\n" + str(e)
if conf_edit_message is not None: except Exception:
shepherd.config.update_toml_message( conf_edit_message = "Error processing new config"
os.path.expanduser(core_conf["conf_edit_path"]), conf_edit_message) else:
conf_edit_message = ("Successfully applied this config at:" +
# if editconf failed, load current config str(datetime.now()))
confman = edit_confman
core_conf = core_edit_conf
if conf_edit_message is not None:
shepherd.config.update_toml_message(
os.path.expanduser(core_conf["conf_edit_path"]), conf_edit_message)
# if editconf failed, load current config for plugins
if confman is not edit_confman: if confman is not edit_confman:
mod_classes = shepherd.module.find_modules(core_conf["modules"]) plugin_classes = shepherd.plugin.find_plugins(core_conf["plugins"])
mod_configs = confman.get_module_configs(mod_classes) plugin_configs = confman.get_plugin_configs(plugin_classes)
# If no editconf file was found, write out the current config as a template # If no editconf file was found, write out the current config as a template
if conf_edit_message is None: if (conf_edit_message is None) and load_editconf:
confman.dump_to_file(os.path.expanduser(core_conf["conf_edit_path"]), confman.dump_to_file(os.path.expanduser(core_conf["conf_edit_path"]),
"Config generated at:" + str(datetime.now())) "Config generated at:" + str(datetime.now()))
return (core_conf, mod_classes, mod_configs) return (core_conf, plugin_classes, plugin_configs)
def core_confdef():
confdef = shepherd.config.ConfDefinition()
confdef.add_def("id", shepherd.config.StringDef())
confdef.add_def("modules", shepherd.config.StringArrayDef())
confdef.add_def("root_dir", shepherd.config.StringDef())
confdef.add_def("conf_edit_path", shepherd.config.StringDef())
return confdef
class ShepherdInterface(shepherd.module.Interface):
def __init__(self, scheduler, config):
super().__init__(None)
self.id = config["id"]
self.root_dir = config["root_dir"]
self.scheduler = scheduler
def main(): def main():
argparser = argparse.ArgumentParser(description="Keep track of a mob " argparser = argparse.ArgumentParser(description="Keep track of a mob "
"of roaming Pis") "of roaming Pis")
argparser.add_argument("configfile", nargs='?', metavar="configfile", argparser.add_argument("configfile", nargs='?', metavar="configfile",
help="Path to configfile", default="shepherd.toml") help="Path to configfile", default="shepherd.toml")
argparser.add_argument('-e', '--noedit',help="Disable the editable config temporarily", action="store_true", default=False)
argparser.add_argument("-t", "--test", help="Test and interface function of the from 'plugin:function'",
default=None)
args = argparser.parse_args() args = argparser.parse_args()
confman = shepherd.config.ConfigManager() confman = shepherd.config.ConfigManager()
confman.load(os.path.expanduser(args.configfile)) confman.load(os.path.expanduser(args.configfile))
core_conf = confman.get_config("shepherd", core_confdef())
breakpoint()
(core_conf, mod_classes, mod_configs) = load_config(args.configfile) (core_conf, plugin_classes, plugin_configs) = load_config(args.configfile, not args.noedit)
if args.test is None:
shepherd.control.init_control(core_conf)
scheduler = BlockingScheduler() shepherd.scheduler.init_scheduler(core_conf)
core_interface = ShepherdInterface(scheduler, core_conf)
# get validated config values for modules, then instantiate them shepherd.plugin.init_plugins(plugin_classes, plugin_configs, core_conf)
modules = {}
mod_interfaces = {}
for name, mod_class in mod_classes.items():
modules[name] = mod_class(mod_configs[name], core_interface)
mod_interfaces[name] = modules[name].interface
# run post init after all modules are loaded to allow them to hook in to shepherd.scheduler.restore_jobs()
# each other
mod_interfaces = SimpleNamespace(**mod_interfaces)
for module in modules.values():
module.init_other_interfaces(mod_interfaces)
print(str(datetime.now())) print(str(datetime.now()))
if args.test is not None:
(test_plugin, test_func) = args.test.split(':')
func = getattr(shepherd.plugin.plugin_functions[test_plugin], test_func)
print(func())
return
print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C')) print('Press Ctrl+{0} to exit'.format('Break' if os.name == 'nt' else 'C'))
try: try:
scheduler.start() shepherd.scheduler.start()
except (KeyboardInterrupt, SystemExit): except (KeyboardInterrupt, SystemExit):
pass pass

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

@ -0,0 +1,114 @@
from enum import Enum, auto
import inspect
class RehydrateMethod(Enum):
    """Strategy used to turn a freezedried dict back into a class instance."""
    DIRECT = auto()        # bypass __init__, restore attributes directly
    INIT = auto()          # pass attributes as keyword args to __init__
    CLASS_METHOD = auto()  # pass attributes to classmethod rehydrate()


# freezedry, for when pickling is just a bit too intense

# Reserved dict key flagging that a dict should be unpacked back out to a
# class instance; its value names the registered freezedryable class.
class_key = "<freezedried>"

# Module-level registry of valid freezedryable classes, populated by the
# @freezedryable decorator, mapping freezedry name -> class.
freezedryables = {}


def freezedryable(cls, rehydrate_method=RehydrateMethod.DIRECT, name=None):
    """Class decorator registering *cls* as freezedryable.

    ``name`` (default: the class name) identifies the class inside a
    freezedried dict via the reserved ``class_key`` entry. Raises if the
    name is not a string or is already registered. Also attaches a
    ``freezedry()`` method that dries the instance's attributes.
    """
    if name is None:
        cls._freezedry_name = cls.__name__
    else:
        # Bug fix: this check was inverted - it rejected every *valid*
        # string name and silently accepted non-strings.
        if not isinstance(name, str):
            raise Exception("freezedryable name must be a string")
        cls._freezedry_name = name
    cls._rehydrate_method = rehydrate_method
    if cls._freezedry_name in freezedryables:
        raise Exception("Duplicate freezedryable class name "+cls._freezedry_name)
    freezedryables[cls._freezedry_name] = cls

    def _freezedry(self):
        dried_dict = _freezedry_dict(vars(self))
        dried_dict[class_key] = self._freezedry_name
        return dried_dict
    cls.freezedry = _freezedry
    return cls
def freezedry(hydrated_obj):
    """Recursively convert *hydrated_obj* into plain JSON-friendly data.

    Primitives pass through untouched; dicts and lists are walked
    recursively; anything decorated @freezedryable is asked to freezedry
    itself. Everything else is rejected.
    """
    if isinstance(hydrated_obj, (str, int, float, bool, type(None))):
        return hydrated_obj
    if isinstance(hydrated_obj, dict):
        return _freezedry_dict(hydrated_obj)
    if isinstance(hydrated_obj, list):
        return [freezedry(item) for item in hydrated_obj]
    if hasattr(hydrated_obj, "_freezedry_name"):
        return hydrated_obj.freezedry()
    raise Exception("Object "+str(hydrated_obj)+" is not freezedryable")
def _freezedry_dict(hydrated_dict):
    """Freezedry every value of *hydrated_dict*, validating its keys."""
    dried = {}
    for key, item in hydrated_dict.items():
        # Only plain string keys survive the round trip, and the class key
        # is reserved for marking instances.
        if not isinstance(key, str):
            raise Exception("Non-string dictionary keys are not freezedryable")
        if key == class_key:
            raise Exception("Key "+class_key+" is reserved for internal freezedry use")
        dried[key] = freezedry(item)
    return dried
def rehydrate(dried_obj):
    """Inverse of freezedry(): rebuild nested data (and registered class
    instances, via _rehydrate_dict) from plain freezedried data."""
    if isinstance(dried_obj, (str, int, float, bool, type(None))):
        return dried_obj
    if isinstance(dried_obj, dict):
        return _rehydrate_dict(dried_obj)
    if isinstance(dried_obj, list):
        return [rehydrate(item) for item in dried_obj]
    raise Exception("Object "+str(dried_obj)+" is not rehydrateable")
def _rehydrate_dict(dried_dict):
    """Rehydrate a dried dict; if it carries the reserved class key, rebuild
    it into an instance of the registered class using that class's
    RehydrateMethod."""
    hydrated_dict = {}
    for k,val in dried_dict.items():
        if not isinstance(k,str):
            raise Exception("Non-string dictionary keys are not rehydrateable")
        # The class key is metadata, not an attribute - skip it here.
        if k != class_key:
            hydrated_dict[k]=rehydrate(val)
    # Check if this is an object that needs to be unpacked back to an instance
    if class_key in dried_dict:
        if dried_dict[class_key] not in freezedryables:
            raise Exception("Class "+dried_dict[class_key]+" has not been decorated as freezedryable")
        f_class=freezedryables[dried_dict[class_key]]
        # If DIRECT, skip __init__ and set attributes back directly
        if f_class._rehydrate_method == RehydrateMethod.DIRECT:
            hydrated_instance = f_class.__new__(f_class)
            hydrated_instance.__dict__.update(hydrated_dict)
        # If INIT, pass all attributes as keywords to __init__ method
        elif f_class._rehydrate_method == RehydrateMethod.INIT:
            hydrated_instance = f_class(**hydrated_dict)
        # If CLASS_METHOD, pass all attributes as keyword arguments to classmethod "rehydrate()"
        elif f_class._rehydrate_method == RehydrateMethod.CLASS_METHOD:
            if inspect.ismethod(getattr(f_class, "rehydrate", None)):
                hydrated_instance = f_class.rehydrate(**hydrated_dict)
            else:
                raise Exception("Class "+str(f_class)+" does not have classmethod 'rehydrate()'")
        else:
            raise Exception("Class _rehydrate_method "+str(f_class._rehydrate_method)+" is not supported")
        return hydrated_instance
    else:
        return hydrated_dict

@ -1,84 +0,0 @@
#!/usr/bin/env python3
from contextlib import suppress
import importlib
class Hook():
    """A simple multicast callback: attach callables, then invoke the hook
    to call every one of them with the same arguments."""

    def __init__(self):
        self.attached_functions = []

    def attach(self, new_func):
        """Register *new_func* to run whenever the hook fires."""
        if not callable(new_func):
            raise TypeError("Argument to Hook.attach must be callable")
        self.attached_functions.append(new_func)

    def __call__(self, *args, **kwargs):
        """Invoke every attached function, in attachment order."""
        for attached in self.attached_functions:
            attached(*args, **kwargs)
class Module():
    """Base class for a Shepherd module.

    Holds the module's validated config, the core shepherd interface, and
    (after init_other_interfaces) the interfaces of all other loaded modules.
    """

    def __init__(self, config, core_interface):
        self.config = config
        self.shepherd = core_interface
        #self.shepherd.scheduler
        # dummy interface in case module doesn't want one
        self.interface = Interface(self)
        self.modules = {}

    def init_other_interfaces(self, interfaces):
        # Called by the core once every module exists; validates any
        # interface a subclass installed before storing the namespace of
        # other modules' interfaces.
        if not isinstance(self.interface, Interface):
            raise TypeError("shepherd.module.Module interface attribute must "
                            "be subclass of type shepherd.module.Interface")
        self.modules = interfaces

    # Look at providing a run() function or similar, which is a thread started
    # post_modules_setup
class SimpleModule(Module):
    """Convenience Module base that invokes setup()/setup_other_modules()
    hooks so subclasses don't need to override __init__ or
    init_other_interfaces directly."""

    def __init__(self, config, core_interface):
        super().__init__(config, core_interface)
        self.setup()

    def init_other_interfaces(self, interfaces):
        super().init_other_interfaces(interfaces)
        self.setup_other_modules()

    # Users override this, self.shepherd and self.config are available now.
    # User still needs to define self.interface if it is used.
    def setup(self):
        pass

    # Override to attach to other modules; runs after all modules are loaded
    # so their interfaces are available via self.modules.
    def setup_other_modules(self):
        pass
"""
An interface to a Shepherd module, accessible by other modules.
All public methods in a module interface need to be threadsafe, as they will
be called by other modules (which generally run in a seperate thread)
"""
class Interface():
    """Base interface to a Shepherd module, accessible by other modules.

    All public methods must be threadsafe, since other modules call them
    from separate threads.
    """

    def __init__(self, module):
        # Underscored: the backing module is not part of the cross-module
        # API surface.
        self._module = module
def find_modules(module_names):
    """Import each named module from the shepherd.modules package and return
    a mapping of name -> first Module subclass found inside it.

    Raises if an imported module contains no Module subclass; import errors
    from importlib propagate unchanged.
    """
    module_classes = {}
    for module_name in module_names:
        mod = importlib.import_module("shepherd.modules." + module_name)
        attrs = [getattr(mod, name) for name in dir(mod)]
        for attr in attrs:
            # issubclass raises TypeError for non-class attributes; skip them.
            with suppress(TypeError):
                if issubclass(attr, Module):
                    module_classes[module_name] = attr
                    break
        else:
            # Bug fix: the fragments previously concatenated to
            # "such asshepherd..." - a trailing space was missing.
            raise Exception("Imported shepherd modules must contain a "
                            "subclass of shepherd.module.Module, such as "
                            "shepherd.module.SimpleModule")
    return module_classes

@ -1,113 +0,0 @@
#!/usr/bin/env python3
import shepherd.config
import shepherd.module
import sys
import os
import time
import argparse
from gpiozero import OutputDevice, Device
from gpiozero.pins.pigpio import PiGPIOFactory
from shepherd.modules.betterservo import BetterServo
Device.pin_factory = PiGPIOFactory()
MOTHTRAP_LED_PIN = 6
MOTHTRAP_SERVO_PIN = 10
MOTHTRAP_SERVO_POWER_PIN = 9
class MothtrapConfDef(shepherd.config.ConfDefinition):
    """Config definition for the moth trap: servo pulse widths (in
    microseconds) and the door-open dwell time (in seconds)."""

    def __init__(self):
        super().__init__()
        self.add_def('servo_open_pulse', shepherd.config.IntDef(default=1200))
        self.add_def('servo_closed_pulse', shepherd.config.IntDef(default=1800))
        self.add_def('servo_open_time', shepherd.config.IntDef(default=5))
class MothtrapModule(shepherd.module.SimpleModule):
    """Controls the moth trap hardware - an LED and a servo-driven door -
    hooked into the picam module's capture cycle."""

    # Shared, pre-built config definition used by the shepherd core.
    conf_def = MothtrapConfDef()

    def setup(self):
        print("Mothtrap config:")
        print(self.config)
        # Pulse widths are configured in microseconds; the servo API takes
        # seconds.
        servo_max = self.config["servo_open_pulse"] / 1000000
        servo_min = self.config["servo_closed_pulse"] / 1000000
        # gpiozero requires min <= max; swap if the open pulse is shorter.
        if servo_min > servo_max:
            servo_min, servo_max = servo_max, servo_min
        self.door_servo = BetterServo(MOTHTRAP_SERVO_PIN, initial_value=None,
                                      active_high=False,
                                      min_pulse_width=servo_min,
                                      max_pulse_width=servo_max)
        self.door_servo_power = OutputDevice(MOTHTRAP_SERVO_POWER_PIN,
                                             active_high=True,
                                             initial_value=False)
        self.led_power = OutputDevice(MOTHTRAP_LED_PIN,
                                      active_high=True,
                                      initial_value=False)

    def setup_other_modules(self):
        # Light the LED around each camera capture, and cycle the door servo
        # after each photo has been taken.
        self.modules.picam.hook_pre_cam.attach(self.led_on)
        self.modules.picam.hook_post_cam.attach(self.led_off)
        self.modules.picam.hook_post_cam.attach(self.run_servo)

    def led_on(self):
        self.led_power.on()

    def led_off(self):
        self.led_power.off()

    def run_servo(self):
        # Power the servo rail, open the door, dwell, close it, then detach
        # and cut power between runs.
        self.door_servo_power.on()
        time.sleep(0.5)
        self.door_servo.pulse_width = self.config["servo_open_pulse"] / 1000000
        time.sleep(self.config["servo_open_time"])
        self.door_servo.pulse_width = self.config["servo_closed_pulse"] / 1000000
        time.sleep(self.config["servo_open_time"])
        self.door_servo.detach()
        self.door_servo_power.off()
def main(argv):
    """Command-line test entry point: exercise the LED and door servo using
    either a config file or the pulse/wait values given on the command line.

    Bug fix: *argv* was accepted but ignored - parse_args() was called
    without it, always reading sys.argv instead.
    """
    argparser = argparse.ArgumentParser(
        description='Module for mothtrap control functions. Run for testing')
    argparser.add_argument("configfile", nargs='?', metavar="configfile",
                           help="Path to configfile", default="conf.toml")
    argparser.add_argument("test_function", nargs='?', choices=['servo'],
                           help="test function to perform", default="servo")
    argparser.add_argument("-o", help="servo open position, in us", type=int,
                           default=1200, dest="servo_open_pulse")
    argparser.add_argument("-c", help="servo closed position, in us", type=int,
                           default=1800, dest="servo_closed_pulse")
    argparser.add_argument("-w", help="wait time, in seconds", type=int,
                           default=5, dest="servo_open_time")
    args = argparser.parse_args(argv)
    confman = shepherd.config.ConfigManager()
    # Fallback config assembled from the command-line arguments, used when no
    # config file exists at the given path.
    srcdict = {"mothtrap": {"servo_open_pulse": args.servo_open_pulse,
                            "servo_closed_pulse": args.servo_closed_pulse,
                            "servo_open_time": args.servo_open_time}}
    if os.path.isfile(args.configfile):
        confman.load(args.configfile)
    else:
        confman.load(srcdict)
    # Instantiate the module directly with a dummy core interface and run one
    # LED + servo cycle.
    mothtrap_mod = MothtrapModule(confman.get_config("mothtrap", MothtrapConfDef()),
                                  shepherd.module.Interface(None))
    mothtrap_mod.led_on()
    mothtrap_mod.run_servo()
    mothtrap_mod.led_off()


if __name__ == "__main__":
    main(sys.argv[1:])

@ -1,134 +0,0 @@
import shepherd.config
import shepherd.module
import io
import os
from datetime import datetime
import time
from picamera import PiCamera
from PIL import Image, ImageDraw, ImageFont
overlayfont = "/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf"
class PiCamConfDef(shepherd.config.ConfDefinition):
    """Config definition for the picam module: image saving/upload options
    plus one or more cron-style capture triggers."""

    def __init__(self):
        super().__init__()
        self.add_def('upload_images', shepherd.config.BoolDef(default=False, optional=True))
        self.add_def('upload_bucket', shepherd.config.StringDef(default="", optional=True))
        self.add_def('save_directory', shepherd.config.StringDef(default="", optional=False))
        self.add_def('append_text', shepherd.config.StringDef(default="", optional=True))
        self.add_def('append_id', shepherd.config.BoolDef(default=True, optional=True))
        # Each [[picam.trigger]] table supplies cron-style fields for one
        # scheduled capture job.
        array = self.add_def('trigger', shepherd.config.TableArrayDef())
        array.add_def('hour', shepherd.config.StringDef())
        array.add_def('minute', shepherd.config.StringDef())
        array.add_def('second', shepherd.config.StringDef(default="0", optional=True))
# on server side, we want to be able to list commands that a module responds to
# without actually instantiating the module class. Add command templates into
# the conf_def, than attach to them in the interface? Was worried about having
# "two sources of truth", but you already need to match the conf_def to the
# name where you access the value in the module. Could have add_command, which
# you then add standard conf_def subclasses to, to reuse validation and server
# form generation logic...
class PiCamInterface(shepherd.module.Interface):
    """Public interface to the picam module: exposes pre/post capture hooks
    that other modules can attach callbacks to."""

    def __init__(self, module):
        super().__init__(module)
        # Fired immediately before / after each capture job.
        self.hook_pre_cam = shepherd.module.Hook()
        self.hook_post_cam = shepherd.module.Hook()
        # self.add_command("trigger", self.module.camera_job)
        # other module can then call, in init_interfaces, if self.modules.picam is not None:
        # self.modules.picam.hooks.attach("pre_cam",self.myfunc)
        # self.modules.picam.pre_cam.attach(self.my_func)
        # self.modules.picam.trigger()
class PiCamModule(shepherd.module.SimpleModule):
    """Captures photos from the Pi camera on cron triggers, stamps them with
    a timestamp/ID overlay and optionally hands them to the uploader."""

    conf_def = PiCamConfDef()

    def setup(self):
        self.interface = PiCamInterface(self)
        # do some camera init stuff
        print("Camera config:")
        print(self.config)
        # Bug fix: this previously used identity comparison (`is ""`), which
        # is implementation-dependent for strings; equality is what's meant.
        if self.config["save_directory"] == "":
            self.save_directory = os.path.join(self.shepherd.root_dir,
                                               "camera")
        else:
            self.save_directory = self.config["save_directory"]
        if not os.path.exists(self.save_directory):
            os.makedirs(self.save_directory)
        #global cam_led
        #cam_led = LED(CAMERA_LED_PIN, active_high=False, initial_value=False)
        # One scheduler job per configured cron trigger.
        for trigger in self.config["trigger"]:
            self.shepherd.scheduler.add_job(self.camera_job, 'cron',
                                            hour=trigger["hour"],
                                            minute=trigger["minute"],
                                            second=trigger["second"])

    def setup_other_modules(self):
        pass

    def camera_job(self):
        """Capture one photo, overlay timestamp/ID text, save atomically and
        optionally move it to the upload bucket."""
        self.interface.hook_pre_cam()
        print("Running camera...")
        stream = io.BytesIO()
        with PiCamera() as picam:
            picam.resolution = (3280, 2464)
            picam.start_preview()
            time.sleep(2)
            picam.capture(stream, format='jpeg')
        # "Rewind" the stream to the beginning so we can read its content
        image_time = datetime.now()
        stream.seek(0)
        newimage = Image.open(stream)
        try:
            fnt = ImageFont.truetype(overlayfont, 50)
        except IOError:
            fnt = ImageFont.load_default()
        draw = ImageDraw.Draw(newimage)
        image_text = image_time.strftime("%Y-%m-%d %H:%M:%S")
        if self.config["append_id"]:
            image_text = image_text + " " + self.shepherd.id
        image_text = image_text + self.config["append_text"]
        draw.text((50, newimage.height-100), image_text, font=fnt,
                  fill=(255, 255, 255, 200))
        image_filename = image_time.strftime("%Y-%m-%d %H-%M-%S")
        if self.config["append_id"]:
            image_filename = image_filename + " " + self.shepherd.id
        image_filename = image_filename + self.config["append_text"] + ".jpg"
        image_filename = os.path.join(self.save_directory, image_filename)
        # Write to a temp name then rename, so partially-written files are
        # never picked up by consumers watching the directory.
        newimage.save(image_filename+".writing", "JPEG")
        os.rename(image_filename+".writing", image_filename)
        if self.config["upload_images"]:
            self.modules.uploader.move_to_bucket(image_filename,
                                                 self.config["upload_bucket"])
        self.interface.hook_post_cam()
# Module is normally loaded by the shepherd core; no standalone test entry
# point is implemented yet.
if __name__ == "__main__":
    pass
    # print("main")
    # main(sys.argv[1:])

@ -0,0 +1,247 @@
#!/usr/bin/env python3
from contextlib import suppress
from abc import ABC, abstractmethod
import importlib
from types import SimpleNamespace
from collections import namedtuple
import sys
import os
import shepherd.scheduler
class Hook():
    """Multicast callback point: plugins attach callables and the owner
    fires them all with a single call."""

    def __init__(self):
        self.attached_functions = []

    def attach(self, new_func):
        """Add *new_func* to the call list; rejects non-callables."""
        if not callable(new_func):
            raise TypeError("Argument to Hook.attach must be callable")
        self.attached_functions.append(new_func)

    def __call__(self, *args, **kwargs):
        """Run each attached function, in the order it was attached."""
        for hooked in self.attached_functions:
            hooked(*args, **kwargs)
class InterfaceFunction():
    """Wraps a plugin callable that is exposed to other plugins.

    The wrapped callable stays reachable as ``self.func`` (PluginInterface
    relies on that attribute when matching jobs to registered functions).
    """

    def __init__(self, func):
        # Reject anything that cannot actually be invoked.
        if callable(func):
            self.func = func
        else:
            raise TypeError("Argument to InterfaceFunction must be callable")

    def __call__(self, *args, **kwargs):
        # Delegate straight through, preserving the return value.
        return self.func(*args, **kwargs)
class Plugin(ABC):
    """Abstract base class every shepherd plugin must subclass."""

    @staticmethod
    @abstractmethod
    def define_config(confdef):
        """Populate *confdef* with this plugin's configuration definition."""
        pass

    @abstractmethod
    def __init__(self, plugininterface, config):
        """Construct with the plugin's PluginInterface and its validated config."""
        pass

    def run(self, hooks, plugins):
        # Optional entry point; the default is a no-op.
        pass
class SimplePlugin(Plugin):
    """Convenience base that stores the standard interface attributes.

    NOTE(review): define_config calls confdef.add_def() with no arguments,
    which looks incomplete — confirm against the add_def signature in
    shepherd.config.
    """

    @staticmethod
    def define_config(confdef):
        confdef.add_def()

    def __init__(self, plugininterface, config):
        super().__init__(plugininterface, config)
        self.config = config
        self.interface = plugininterface
        self.plugins = plugininterface.other_plugins
        self.hooks = plugininterface.hooks
plugin_interfaces = {}  # dict of plugin interfaces, keyed by plugin name
# convenience dicts bundling together lists from interfaces
plugin_functions = {}  # plugin name -> namespace of callable interface functions
plugin_hooks = {}  # plugin name -> namespace of Hook objects
# Startup state: while _defer is True, hook attachments and job additions are
# queued instead of applied; init_plugins() replays the queues once every
# plugin interface exists.
_defer = True
_deferred_attachments = []
_deferred_jobs = []
def init_plugins(plugin_classes, plugin_configs, core_config):
    """Instantiate every plugin, then replay deferred hook attachments and jobs.

    plugin_classes: dict of name -> Plugin subclass (from find_plugins()).
    plugin_configs: dict of name -> validated config for that plugin.
    core_config: shepherd core configuration handed to each PluginInterface.
    """
    # Startup pluginmanagers
    global plugin_interfaces
    global plugin_functions
    global plugin_hooks
    global _defer
    global _deferred_attachments
    global _deferred_jobs
    for name, plugin_class in plugin_classes.items():
        # Instantiate the plugin interface - this also instantiates
        # the plugin itself.
        plugin_interfaces[name] = PluginInterface(
            name, plugin_class, plugin_configs[name], core_config)
        plugin_functions[name] = plugin_interfaces[name].functions
        plugin_hooks[name] = plugin_interfaces[name].hooks
    # interfaces and hooks should now be populated, attach hooks, schedule jobs
    _defer = False
    for attachment in _deferred_attachments:
        _attach_hook(attachment)
    for job_desc in _deferred_jobs:
        _add_job(job_desc)
    # Hand the shared functions dict back out so every plugin can reach the
    # others' interface callables via PluginInterface.other_plugins.
    for plugininterface in plugin_interfaces.values():
        plugininterface.functions = plugin_functions
def _add_job(job_desc):
    """Schedule *job_desc* immediately, or queue it while startup is deferred."""
    global _deferred_jobs
    global _defer
    if _defer:
        # Plugins are still being constructed; init_plugins() replays this.
        _deferred_jobs.append(job_desc)
    else:
        shepherd.scheduler.schedule_job(job_desc)
def _attach_hook(attachment):
    """Attach attachment.func to the named plugin hook.

    While plugins are still being instantiated (_defer is True) the attachment
    is queued; init_plugins() replays the queue once every hook exists.
    Raises Exception if the target plugin or hook does not exist.
    """
    global plugin_hooks
    global _deferred_attachments
    global _defer
    if _defer:
        _deferred_attachments.append(attachment)
        return
    targetplugin_hooks = plugin_hooks.get(attachment.pluginname)
    if targetplugin_hooks is None:
        raise Exception(
            "Cannot attach hook to non-existing module '"+attachment.pluginname+"'")
    # BUG FIX: getattr without a default raises AttributeError for a missing
    # hook, so the intended "Could not find hook" error below was unreachable.
    targethook = getattr(targetplugin_hooks, attachment.hookname, None)
    if targethook is None:
        raise Exception("Could not find hook '" +
                        attachment.hookname+"' in module '"+attachment.pluginname+"'")
    targethook.attach(attachment.func)
# Eventually, would like to be able to have client plugin simply:
# self.shepherd.add_job(trigger, self.interface.myfunc)
# self.shepherd.attach_hook(pluginnanme,hookname, callable)
# self.shepherd.addinterface
# self.shepherd.hooks.myhook()
# self.shepherd.plugins.otherplugin.otherinterface()
# self.shepherd.add_job()
# Would be good to be able to use abstract methods to enable simpler plugin config
# defs. A way to avoid instantiating the class would be to run it all as class methods,
# enabling
# Record of a (possibly deferred) hook attachment: attach `func` to hook
# `hookname` on plugin `pluginname` once that plugin exists.
HookAttachment = namedtuple(
    'HookAttachment', ['pluginname', 'hookname', 'func'])
class PluginInterface():
    """Manages a single plugin instance.

    All interaction between the plugin and other Shepherd components or
    plugins should go through here.
    """

    def __init__(self, pluginname, pluginclass, pluginconfig, coreconfig):
        if not issubclass(pluginclass, Plugin):
            raise TypeError(
                "Argument must be subclass of shepherd.plugin.Plugin")
        self.coreconfig = coreconfig
        self.hooks = SimpleNamespace()  # hooks this plugin publishes
        self.functions = SimpleNamespace()  # interface callables this plugin exposes
        self._name = pluginname
        # Instantiating the plugin may call register_hook/register_function
        # back on this interface.
        self._plugin = pluginclass(self, pluginconfig)

    def register_hook(self, name):
        """Publish a new Hook as self.hooks.<name>."""
        setattr(self.hooks, name, Hook())

    def register_function(self, func):
        """Expose *func* to other plugins under its own __name__."""
        setattr(self.functions, func.__name__, InterfaceFunction(func))

    @property
    def other_plugins(self):
        # Module-level shared dict: plugin name -> functions namespace.
        global plugin_functions
        return plugin_functions

    def attach_hook(self, pluginname, hookname, func):
        """Attach *func* to another plugin's hook (deferred during startup)."""
        _attach_hook(HookAttachment(pluginname, hookname, func))

    # Add a job to the scheduler. By default each will be identified by the
    # interface callable name, and so adding another job with the same callable
    # will override the first.
    # Use the optional job_name to differentiate jobs with an extra string
    def add_job(self, func, hour, minute, second=0, job_name=""):
        for function_name, function in self.functions.__dict__.items():
            if func == function.func:
                # jobstring should canonically describe this job, to be retrieved
                # after reboot later. Of the form:
                # shepherd:pluginname:functionname:jobname
                jobstring = "shepherd:"+self._name+":"+function_name+":"+job_name
                _add_job(shepherd.scheduler.JobDescription(jobstring, hour, minute, second))
                break
        else:
            raise Exception(
                "Could not add job. Callable must first be registered as "
                "a plugin interface with PluginInterface.register_function()")
"""
An interface to a Shepherd module, accessible by other modules.
All public methods in a module interface need to be threadsafe, as they will
be called by other modules (which generally run in a seperate thread)
"""
def _import_plugin_module(plugin_name, plugin_dir):
    """Import the module providing *plugin_name*.

    Search order: the bundled shepherd.plugins package, then a
    "shepherd_<name>" module inside plugin_dir (if given), then
    "shepherd_<name>" on the normal import path.
    """
    try:
        return importlib.import_module('.'+plugin_name, "shepherd.plugins")
    except ModuleNotFoundError:
        pass
    if (plugin_dir is not None) and (plugin_dir != ""):
        if not os.path.isdir(plugin_dir):
            raise Exception("plugin_dir is not a valid directory")
        sys.path.append(plugin_dir)
        try:
            return importlib.import_module("shepherd_" + plugin_name)
        except ModuleNotFoundError as err:
            raise Exception("Could not find plugin "+plugin_name) from err
        finally:
            # BUG FIX: the original only removed plugin_dir on success,
            # leaking it onto sys.path whenever the import failed.
            sys.path.remove(plugin_dir)
    try:
        return importlib.import_module("shepherd_" + plugin_name)
    except ModuleNotFoundError as err:
        raise Exception("Could not find plugin "+plugin_name) from err


def find_plugins(plugin_names, plugin_dir=None):
    """Resolve each plugin name to its Plugin subclass.

    Returns a dict mapping plugin name -> Plugin subclass.
    Raises Exception when a plugin cannot be imported or its module does not
    contain a Plugin subclass.
    """
    plugin_classes = {}
    for plugin_name in plugin_names:
        mod = _import_plugin_module(plugin_name, plugin_dir)
        # Scan the imported module for a Plugin subclass.
        attrs = [getattr(mod, name) for name in dir(mod)]
        for attr in attrs:
            with suppress(TypeError):
                if issubclass(attr, Plugin):
                    plugin_classes[plugin_name] = attr
                    break
        else:
            # BUG FIX: message previously rendered as "such asshepherd..."
            raise Exception("Imported shepherd plugin modules must contain a "
                            "subclass of shepherd.plugin.Plugin, such as "
                            "shepherd.plugin.SimplePlugin")
    return plugin_classes

@ -65,7 +65,7 @@ def main(argv):
else: else:
confman.load(srcdict) confman.load(srcdict)
aphidtrap_mod = AphidtrapModule(confman.get_config("aphid", AphidtrapConfDef()), aphidtrap_mod = AphidtrapModule(confman.get_config("aphidtrap", AphidtrapConfDef()),
shepherd.module.Interface(None)) shepherd.module.Interface(None))
aphidtrap_mod.led_on() aphidtrap_mod.led_on()

@ -0,0 +1,93 @@
#!/usr/bin/env python3
import shepherd.config as shconf
import shepherd.plugin
import sys
import os
import time
import argparse
from gpiozero import OutputDevice, Device
from gpiozero.pins.pigpio import PiGPIOFactory
from shepherd.plugins.betterservo import BetterServo
# Route all gpiozero devices through the pigpio daemon for accurate timing.
Device.pin_factory = PiGPIOFactory()

# Pin numbers for the moth-trap hardware — assumed BCM numbering via pigpio;
# TODO confirm against the board schematic.
MOTHTRAP_LED_PIN = 6
MOTHTRAP_SERVO_PIN = 10
MOTHTRAP_SERVO_POWER_PIN = 9
class MothtrapPlugin(shepherd.plugin.Plugin):
    """Controls moth-trap hardware: an LED and a servo-driven door.

    Attaches to the picam plugin's hooks so the LED lights before each
    capture and the door servo cycles after it.
    """

    @staticmethod
    def define_config(confdef):
        # Pulse widths are in microseconds; open time in seconds.
        confdef.add_def('servo_open_pulse', shconf.IntDef(default=1200, minval=800, maxval=2200))
        confdef.add_def('servo_closed_pulse', shconf.IntDef(default=1800, minval=800, maxval=2200))
        confdef.add_def('servo_open_time', shconf.IntDef(default=5))

    def __init__(self, pluginInterface, config):
        super().__init__(pluginInterface, config)
        self.config = config
        self.interface = pluginInterface
        self.plugins = pluginInterface.other_plugins
        self.hooks = pluginInterface.hooks
        self.root_dir = os.path.expanduser(pluginInterface.coreconfig["root_dir"])
        self.id = pluginInterface.coreconfig["id"]
        print("Mothtrap config:")
        print(self.config)
        # Configured pulse widths are microseconds; gpiozero expects seconds.
        servo_max = self.config["servo_open_pulse"] / 1000000
        servo_min = self.config["servo_closed_pulse"] / 1000000
        if servo_min > servo_max:
            # Open/closed may be configured either way around; normalise.
            servo_min, servo_max = servo_max, servo_min
        # Limits widened by 1us so the configured extremes stay in range.
        self.door_servo = BetterServo(MOTHTRAP_SERVO_PIN, initial_value=None,
                                      active_high=False,
                                      min_pulse_width=servo_min-0.000001,
                                      max_pulse_width=servo_max+0.000001)
        print(F"Supplied min: {servo_min}, max: {servo_max}")
        self.door_servo_power = OutputDevice(MOTHTRAP_SERVO_POWER_PIN,
                                             active_high=True,
                                             initial_value=False)
        self.led_power = OutputDevice(MOTHTRAP_LED_PIN,
                                      active_high=True,
                                      initial_value=False)
        # Light the LED around each capture; cycle the door afterwards.
        self.interface.attach_hook("picam", "pre_cam", self.led_on)
        self.interface.attach_hook("picam", "post_cam", self.led_off)
        self.interface.attach_hook("picam", "post_cam", self.run_servo)
        self.interface.register_function(self.test)

    def led_on(self):
        """Turn the trap LED on."""
        self.led_power.on()

    def led_off(self):
        """Turn the trap LED off."""
        self.led_power.off()

    def run_servo(self):
        """Cycle the door: power the servo, open, close, then cut power."""
        self.door_servo_power.on()
        time.sleep(0.5)  # let the servo supply settle before driving it
        self.door_servo.pulse_width = self.config["servo_open_pulse"] / 1000000
        time.sleep(self.config["servo_open_time"])
        self.door_servo.pulse_width = self.config["servo_closed_pulse"] / 1000000
        time.sleep(self.config["servo_open_time"])
        self.door_servo.detach()
        self.door_servo_power.off()

    def test(self):
        """Manually exercise the LED and door; exposed as an interface function."""
        self.led_on()
        time.sleep(1)
        self.led_off()
        self.run_servo()

@ -0,0 +1,174 @@
import io
import os
from datetime import datetime
import time
import shepherd.config as shconf
import shepherd.plugin
from picamera import PiCamera
from PIL import Image, ImageDraw, ImageFont
# Bundled assets (overlay font and logo) live alongside this module.
asset_dir = os.path.dirname(os.path.realpath(__file__))
overlayfont_filename = os.path.join(asset_dir, "DejaVuSansMono.ttf")
logo_filename = os.path.join(asset_dir, "smallshepherd.png")
# on server side, we want to be able to list commands that a module responds to
# without actually instantiating the module class. Add command templates into
# the conf_def, than attach to them in the interface? Was worried about having
# "two sources of truth", but you already need to match the conf_def to the
# name where you access the value in the module. Could have add_command, which
# you then add standard conf_def subclasses to, to reuse validation and server
# form generation logic...
class PiCamPlugin(shepherd.plugin.Plugin):
    """Captures stills from the Raspberry Pi camera on configured triggers.

    Each trigger schedules camera_job(), which fires the pre_cam hook,
    captures a JPEG, optionally stamps a logo/ID/date overlay, writes the
    file atomically, optionally hands it to the Uploader plugin, and fires
    the post_cam hook.
    """

    @staticmethod
    def define_config(confdef):
        """Declare this plugin's configuration schema."""
        confdef.add_def('upload_images', shconf.BoolDef(default=False, optional=True,
            helptext="If true, move to an Uploader bucket. Requires Uploader plugin"))
        confdef.add_def('upload_bucket', shconf.StringDef(default="", optional=True,
            helptext="Name of uploader bucket to shift images to."))
        confdef.add_def('save_directory', shconf.StringDef(default="", optional=True,
            helptext="Name of directory path to save images. If empty, a 'usbcamera' directory under the Shepherd root dir will be used"))
        confdef.add_def('append_id', shconf.BoolDef(default=True, optional=True,
            helptext="If true, add the system ID to the end of image filenames"))
        confdef.add_def('show_overlay', shconf.BoolDef(default=True, optional=True,
            helptext="If true, add an overlay on each image with the system ID and date."))
        confdef.add_def('overlay_desc', shconf.StringDef(default="", optional=True,
            helptext="Text to add to the overlay after the system ID and camera name"))
        confdef.add_def('jpeg_quality', shconf.IntDef(default=80, minval=60, maxval=95, optional=True,
            helptext="JPEG quality to save with. Max of 95, passed directly to Pillow"))
        array = confdef.add_def('trigger', shconf.TableArrayDef(
            helptext="Array of triggers that will use all cameras"))
        array.add_def('hour', shconf.StringDef())
        array.add_def('minute', shconf.StringDef())
        array.add_def('second', shconf.StringDef(default="0", optional=True))

    def __init__(self, pluginInterface, config):
        super().__init__(pluginInterface, config)
        self.config = config
        self.interface = pluginInterface
        self.plugins = pluginInterface.other_plugins
        self.hooks = pluginInterface.hooks
        self.root_dir = os.path.expanduser(pluginInterface.coreconfig["root_dir"])
        self.id = pluginInterface.coreconfig["id"]
        # Hooks other plugins can attach to (e.g. lighting control).
        self.interface.register_hook("pre_cam")
        self.interface.register_hook("post_cam")
        self.interface.register_function(self.camera_job)
        # do some camera init stuff
        print("PiCamera config:")
        print(self.config)
        # Seconds to wait for exposure and white balance auto-adjust to stabilise
        self.stabilise_delay = 3
        # BUG FIX: was `is ""` — identity comparison against a string literal
        # is implementation-dependent and a SyntaxWarning on Python 3.8+.
        if self.config["save_directory"] == "":
            self.save_directory = os.path.join(self.root_dir, "picamera")
        else:
            self.save_directory = self.config["save_directory"]
        if not os.path.exists(self.save_directory):
            os.makedirs(self.save_directory)
        if self.config["show_overlay"]:
            # Load assets once; resized variants are cached per font size.
            self.logo_im = Image.open(logo_filename)
            self.font_size_cache = {}
            self.logo_size_cache = {}
        for trigger in self.config["trigger"]:
            # Job name encodes the trigger so each trigger gets its own job.
            trigger_id = trigger["hour"]+'-' + trigger["minute"]+'-'+trigger["second"]
            self.interface.add_job(
                self.camera_job, trigger["hour"], trigger["minute"], trigger["second"], job_name=trigger_id)

    def _generate_overlay(self, width, height, image_time):
        """Build the translucent banner (logo, description, timestamp) for an
        image of the given size; returns an RGBA Image sized to the banner."""
        font_size = int(height/40)
        margin_size = int(font_size/5)
        if font_size not in self.font_size_cache:
            self.font_size_cache[font_size] = ImageFont.truetype(
                overlayfont_filename, int(font_size*0.9))
        thisfont = self.font_size_cache[font_size]
        if font_size not in self.logo_size_cache:
            # Scale the logo to the banner height, preserving aspect ratio.
            newsize = (int(self.logo_im.width*(
                font_size/self.logo_im.height)), font_size)
            self.logo_size_cache[font_size] = self.logo_im.resize(
                newsize, Image.BILINEAR)
        thislogo = self.logo_size_cache[font_size]
        desc_text = self.config["overlay_desc"]
        if self.config["append_id"]:
            desc_text = self.id + " " + desc_text
        time_text = image_time.strftime("%Y-%m-%d %H:%M:%S")
        overlay = Image.new('RGBA', (width, font_size+(2*margin_size)), (0, 0, 0))
        overlay.paste(thislogo, (int((overlay.width-thislogo.width)/2), margin_size))
        draw = ImageDraw.Draw(overlay)
        draw.text((margin_size*2, margin_size), desc_text,
                  font=thisfont, fill=(255, 255, 255, 255))
        datewidth, _ = draw.textsize(time_text, thisfont)
        draw.text((overlay.width-(margin_size*2)-datewidth, margin_size), time_text, font=thisfont,
                  fill=(255, 255, 255, 255))
        # make whole overlay half transparent
        overlay.putalpha(128)
        return overlay

    def camera_job(self):
        """Capture, stamp, save and optionally upload one image.

        Runs as a scheduled job between the pre_cam and post_cam hooks.
        """
        self.hooks.pre_cam()
        # Capture image
        print("Running camera...")
        stream = io.BytesIO()
        with PiCamera() as picam:
            picam.resolution = (3280, 2464)
            picam.start_preview()
            time.sleep(self.stabilise_delay)
            picam.capture(stream, format='jpeg')
        # "Rewind" the stream to the beginning so we can read its content
        stream.seek(0)
        img = Image.open(stream)
        # Process image
        image_time = datetime.now()
        if self.config["show_overlay"]:
            overlay = self._generate_overlay(img.width, img.height, image_time)
            img.paste(overlay, (0, img.height-overlay.height), overlay)
        # Filename variant of the timestamp uses '-' so it is filesystem-safe.
        image_filename = image_time.strftime("%Y-%m-%d %H-%M-%S")
        if self.config["append_id"]:
            image_filename = image_filename + " " + self.id
        image_filename = image_filename + ".jpg"
        image_filename = os.path.join(self.save_directory, image_filename)
        # Write under a temporary ".writing" name, then rename, so directory
        # watchers (e.g. upload buckets) never see a half-written file.
        img.save(image_filename+".writing", "JPEG", quality=self.config["jpeg_quality"])
        os.rename(image_filename+".writing", image_filename)
        if self.config["upload_images"]:
            self.plugins["uploader"].move_to_bucket(image_filename, self.config["upload_bucket"])
        self.hooks.post_cam()
if __name__ == "__main__":
    # This module is meant to be loaded as a shepherd plugin; direct
    # execution is a deliberate no-op.
    pass
    # print("main")
    # main(sys.argv[1:])

@ -0,0 +1,285 @@
#!/usr/bin/env python3
import shepherd.config as shconf
import shepherd.plugin
import queue
import threading
import re
import serial
import time
from enum import Enum, auto
from collections import namedtuple
class Command(Enum):
    """Command names in the scout supervisor board's serial protocol.

    The value is the literal command string sent on the wire; responses echo
    the same name (see CommandHandler._parse_command).
    """
    BATV = "batv"    # read battery voltage
    BATI = "bati"    # read battery current
    TIME = "time"    # read RTC time
    ALARM = "alarm"  # set the wake alarm
    AUX5V = "aux5v"  # switch the auxiliary 5V rail
    PWM1 = "pwm1"    # PWM channel 1
    PWM2 = "pwm2"    # PWM channel 2
    OUT1 = "out1"    # switched output 1
    OUT2 = "out2"    # switched output 2
class CommandRequest():
    """One queued command exchange with the supervisor microcontroller."""

    def __init__(self, command, arglist):
        # event that is triggered when a matching command response is
        # recieved and the data parsed. Callers wait on this before
        # reading response_args.
        self.responded = threading.Event()
        self.command = command
        self.arglist = arglist
        # Filled in by the comms thread when the matching response arrives.
        self.response_args = []
# Immutable snapshot of the supervisor board's reported state.
# NOTE(review): defined but never populated anywhere in this file — confirm
# whether it is still needed.
ScoutState = namedtuple(
    'ScoutState', ['batv',
                   'bati',
                   'out1',
                   'out2',
                   'pwm1',
                   'pwm2', ])
# Plugin to interface with the shepherd scout pcb modules.
# A seperate thread is started to handle the comms with the
# supervising microcontroller. Interface functions add a request
# to the queue, and some may wait for a state to be updated before
# returning (with a timeout).
# Command Requests (things to be added to the queue) should only be generated
# internally from this plugin - interface functions are provided to wrap
# them and optionally return some changed state
# The request object (namedTuple?) contains the request string,
class CommandHandler():
    """Owns the serial link to the supervisor microcontroller.

    A daemon thread implements a producer-consumer pattern: interface code
    enqueues CommandRequest objects via request(); the thread sends each as
    "?<command>[:args]\\n", matches the next response line carrying the same
    command name, fills in the request's response_args and sets its
    `responded` event. Unanswered requests time out after request_timeout.
    """

    def __init__(self, config):
        self._command_queue = queue.Queue()
        self.port = serial.Serial()
        self.current_request = None       # request awaiting a response
        self.curr_request_sent_time = None
        self.request_timeout = 2          # seconds before a request is abandoned
        self.config = config
        self._rx_string = ""              # unparsed receive buffer
        self.port.baudrate = 57600
        self.port.timeout = 0             # non-blocking reads
        self.port.port = self.config["serialport"]
        # pull in serial config
        # start thread
        self.thread = threading.Thread(target=self._serial_comm_thread, daemon=True)
        self.thread.start()

    def request(self, newRequest):
        """Queue a CommandRequest for the comms thread (threadsafe)."""
        self._command_queue.put(newRequest)

    def _send_request(self):
        # Wire format: '?' <command> [':' comma-separated args] '\n'
        argstr = ""
        if len(self.current_request.arglist) > 0:
            argstr = ':'+','.join(self.current_request.arglist)
        send_str = '?'+self.current_request.command.value+argstr+'\n'
        self.port.write(send_str.encode('utf-8'))
        self.curr_request_sent_time = time.time()

    def _process_bytes(self, new_bytes):
        """Append received bytes to the buffer and parse any complete commands."""
        self._rx_string = self._rx_string + new_bytes.decode('utf-8')
        if ('!' in self._rx_string) or ('?' in self._rx_string):
            match = re.search(r"([!\?])(.+?)[\r\n]", self._rx_string)
            while match is not None:
                self._parse_command(match[2])
                # only keep part of string after our match and look for another
                # command. BUG FIX: the original re-searched here with an
                # un-grouped pattern (r"[!\?].+?\n"), so match[2] raised
                # IndexError whenever two commands arrived in one buffer.
                self._rx_string = self._rx_string[match.end():]
                match = re.search(r"([!\?])(.+?)[\r\n]", self._rx_string)
        else:
            # No command start characters anywhere in the string, so ditch it
            self._rx_string = ""

    def _parse_command(self, command_str):
        """Match one response line against the outstanding request, if any."""
        command_name, _, command_args = command_str.partition(':')
        if self.current_request is not None:
            if command_name == self.current_request.command.value:
                self.current_request.response_args = command_args.split(',')
                self.current_request.responded.set()
                self.current_request = None

    def _handle_serial_port(self):
        """Poll loop: read incoming bytes, send queued requests, time out stale ones."""
        while True:
            if self.port.in_waiting > 0:
                self._process_bytes(self.port.read(self.port.in_waiting))
            elif self.current_request is None:
                try:
                    self.current_request = self._command_queue.get(
                        block=True, timeout=0.01)
                    self._send_request()
                except queue.Empty:
                    pass
            else:
                if (time.time()-self.curr_request_sent_time) > self.request_timeout:
                    # Timeout the request
                    self.current_request = None
                else:
                    pass
                    # Ideally have a short spin loop delay here, but could cause serial buffer to back up
                    # pyserial buffer is apparently 1024 or 4096 bytes, and at 57600 baud 10ms delay would only
                    # be 72 bytes.
                    time.sleep(0.01)

    def _serial_comm_thread(self):
        """Thread body: keep the port open and pump _handle_serial_port,
        retrying once a second on serial errors."""
        while True:
            try:
                self.port.open()
                self._handle_serial_port()
            except serial.SerialException:
                time.sleep(1)
class ScoutPlugin(shepherd.plugin.Plugin):
    """Exposes the shepherd scout supervisor board over its serial protocol.

    Each public method wraps one board command; all are registered as
    interface functions so other plugins can call them. Every wrapper
    returns the board's first response argument (a string) or None on a
    2-second timeout.
    """

    @staticmethod
    def define_config(confdef):
        confdef.add_def('boardver', shconf.StringDef())
        confdef.add_def('serialport', shconf.StringDef())

    def __init__(self, pluginInterface, config):
        super().__init__(pluginInterface, config)
        self.config = config
        self.interface = pluginInterface
        self.plugins = pluginInterface.other_plugins
        self.hooks = pluginInterface.hooks
        self.cmd_handler = CommandHandler(config)
        # Expose every board command to other plugins.
        for func in (self.get_batv, self.get_bati, self.get_time,
                     self.set_alarm, self.set_aux5v, self.set_pwm1,
                     self.set_pwm2, self.set_out1, self.set_out2, self.test):
            self.interface.register_function(func)

    # Shared request/wait/return logic, deduplicated from nine identical
    # method bodies in the original.
    def _transact(self, command, arglist):
        """Send *command* with *arglist*; return the first response argument,
        or None if the board does not answer within 2 seconds."""
        req = CommandRequest(command, arglist)
        self.cmd_handler.request(req)
        if req.responded.wait(2):
            return req.response_args[0]
        return None

    def get_batv(self):
        """Battery voltage as reported by the board (string), or None."""
        return self._transact(Command.BATV, [])

    def get_bati(self):
        """Battery current as reported by the board (string), or None."""
        return self._transact(Command.BATI, [])

    def set_aux5v(self, enabled):
        """Switch the auxiliary 5V rail on or off."""
        return self._transact(Command.AUX5V, [str(enabled).lower()])

    def set_pwm1(self, enabled, pulse_length):
        """Enable/disable PWM channel 1 with the given pulse length."""
        return self._transact(Command.PWM1, [str(enabled).lower(), str(pulse_length)])

    def set_pwm2(self, enabled, pulse_length):
        """Enable/disable PWM channel 2 with the given pulse length."""
        return self._transact(Command.PWM2, [str(enabled).lower(), str(pulse_length)])

    def set_out1(self, enabled):
        """Switch output 1 on or off."""
        return self._transact(Command.OUT1, [str(enabled).lower()])

    def set_out2(self, enabled):
        """Switch output 2 on or off."""
        return self._transact(Command.OUT2, [str(enabled).lower()])

    def get_time(self):
        """Current RTC time from the board, or None."""
        return self._transact(Command.TIME, [])

    def set_alarm(self, unix_time):
        """Set the board's wake alarm to *unix_time*."""
        return self._transact(Command.ALARM, [unix_time])

    def test(self):
        """Exercise every board function in sequence, printing the results."""
        print("Testing companion board...")
        print(F"Current RTC time is {self.get_time()}")
        print(F"Current BatV is {self.get_batv()}")
        print(F"Current BatI is {self.get_bati()}")
        print("Turning on Out1 for 1 second")
        self.set_out1(True)
        time.sleep(1)
        self.set_out1(False)
        print("Turning on Out2 for 1 second")
        self.set_out2(True)
        time.sleep(1)
        self.set_out2(False)
        print("Enabling auxilliary 5V")
        self.set_aux5v(True)
        print("Sweeping PWM1 from 1000us to 2000us")
        self.set_pwm1(True, 1000)
        time.sleep(1)
        self.set_pwm1(True, 2000)
        time.sleep(1)
        self.set_pwm1(False, 1000)
        print("Sweeping PWM2 from 1000us to 2000us")
        self.set_pwm2(True, 1000)
        time.sleep(1)
        self.set_pwm2(True, 2000)
        time.sleep(1)
        self.set_pwm2(False, 1000)
        self.set_aux5v(False)
        print("Test finished")
        return None

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

@ -1,9 +1,11 @@
#!/usr/bin/env python3
import shutil import shutil
import os import os
import threading import threading
import paramiko import paramiko
import shepherd.config import shepherd.config as shconf
import shepherd.module import shepherd.plugin
# configdef = shepherd.config.definition() # configdef = shepherd.config.definition()
# Can either import shepherd.config here, and call a function to build a config_def # Can either import shepherd.config here, and call a function to build a config_def
@ -11,28 +13,6 @@ import shepherd.module
# probably go with entry point, to stay consistent with the module # probably go with entry point, to stay consistent with the module
class UploaderConfDef(shepherd.config.ConfDefinition):
def __init__(self):
super().__init__()
dests = self.add_def('destination', shepherd.config.TableArrayDef())
dests.add_def('name', shepherd.config.StringDef())
dests.add_def('protocol', shepherd.config.StringDef())
dests.add_def('address', shepherd.config.StringDef(optional=True))
dests.add_def('port', shepherd.config.IntDef(optional=True))
dests.add_def('path', shepherd.config.StringDef(optional=True))
dests.add_def('username', shepherd.config.StringDef(optional=True))
dests.add_def('password', shepherd.config.StringDef(optional=True))
dests.add_def('keyfile', shepherd.config.StringDef(default="", optional=True))
dests.add_def('add_id_to_path', shepherd.config.BoolDef(default=True, optional=True))
buckets = self.add_def('bucket', shepherd.config.TableArrayDef())
buckets.add_def('name', shepherd.config.StringDef())
buckets.add_def('open_link_on_new', shepherd.config.BoolDef())
buckets.add_def('opportunistic', shepherd.config.BoolDef(default=True, optional=True))
buckets.add_def('keep_copy', shepherd.config.BoolDef())
buckets.add_def('destination', shepherd.config.StringDef())
# on server side, we want to be able to list commands that a module responds to # on server side, we want to be able to list commands that a module responds to
# without actually instantiating the module class. Add command templates into # without actually instantiating the module class. Add command templates into
# the conf_def, than attach to them in the interface? Was worried about having # the conf_def, than attach to them in the interface? Was worried about having
@ -42,28 +22,18 @@ class UploaderConfDef(shepherd.config.ConfDefinition):
# form generation logic... # form generation logic...
class UploaderInterface(shepherd.module.Interface): # The uploader plugin allows the definition of upload "buckets" - essentially
def __init__(self, module): # as a way of collecting together settings for upload, timing, and retention settings
super().__init__(module) # Buckets will ignore any filenames ending with ".writing" or ".uploaded"
# self.add_command("trigger", self.module.camera_job) # The "move_to_bucket" interface function is provided, but bucket directories will
def _get_bucket_path(self, bucket_name): # also work with any files moved into them externally.
return self._module.buckets[bucket_name].path
def move_to_bucket(self, filepath, bucket_name):
dest_path = os.path.join(self._get_bucket_path(bucket_name),
os.path.basename(filepath))
temp_dest_path = dest_path + ".writing"
shutil.move(filepath, temp_dest_path)
os.rename(temp_dest_path, dest_path)
# notify bucket to check for new files
self._module.buckets[bucket_name].newfile_event.set()
class Destination(): class Destination():
def __init__(self, config, core_interface): def __init__(self, config, node_id, root_dir):
self.config = config self.config = config
self.shepherd = core_interface self.node_id = node_id
self.root_dir = root_dir
self.sendlist_condition = threading.Condition() self.sendlist_condition = threading.Condition()
self.send_list = [] self.send_list = []
@ -73,7 +43,7 @@ class Destination():
# Override this in subclasses, implementing the actual upload process. # Override this in subclasses, implementing the actual upload process.
# Return true on success, false on failure. # Return true on success, false on failure.
def upload(self, filepath, suffix): def upload(self, filepath, suffix):
print ("Dummy uploading "+filepath) print("Dummy uploading "+filepath)
return True return True
def add_files_to_send(self, file_path_list): def add_files_to_send(self, file_path_list):
@ -102,22 +72,25 @@ class Destination():
try: try:
self.upload(file_to_send, ".uploading") self.upload(file_to_send, ".uploading")
os.rename(file_to_send+".uploading", file_to_send+".uploaded") os.rename(file_to_send+".uploading", file_to_send+".uploaded")
except: except Exception as e:
print(F"Upload failed with exception {e}")
os.rename(file_to_send+".uploading", file_to_send) os.rename(file_to_send+".uploading", file_to_send)
self.send_list.append(file_to_send) self.send_list.append(file_to_send)
class SFTPDestination(Destination): class SFTPDestination(Destination):
def upload(self, filepath, suffix): def upload(self, filepath, suffix):
print("Starting upload...")
with paramiko.Transport((self.config["address"], with paramiko.Transport((self.config["address"],
self.config["port"])) as transport: self.config["port"])) as transport:
transport.connect(username=self.config["username"], transport.connect(username=self.config["username"],
password=self.config["password"]) password=self.config["password"])
with paramiko.SFTPClient.from_transport(transport) as sftp: with paramiko.SFTPClient.from_transport(transport) as sftp:
print("Uploading "+filepath+" to "+self.config["address"]+" via SFTP") print("Uploading "+filepath+" to " +
self.config["address"]+" via SFTP")
if self.config["add_id_to_path"]: if self.config["add_id_to_path"]:
destdir = os.path.join(self.config["path"], destdir = os.path.join(self.config["path"],
self.shepherd.id) self.node_id)
else: else:
destdir = self.config["path"] destdir = self.config["path"]
@ -134,23 +107,26 @@ class SFTPDestination(Destination):
class Bucket(): class Bucket():
def __init__(self, name, open_link_on_new, opportunistic, keep_copy, def __init__(self, name, open_link_on_new, opportunistic, keep_copy,
destination, core_interface, path=None, old_path=None): destination, node_id, root_dir, path=None, old_path=None):
self.newfile_event = threading.Event() self.newfile_event = threading.Event()
self.newfile_event.set() self.newfile_event.set()
self.node_id = node_id
self.root_dir = root_dir
self.destination = destination self.destination = destination
self.shepherd = core_interface
self.path = path self.path = path
if self.path is None: if self.path is None:
self.path = os.path.join(self.shepherd.root_dir, name) self.path = os.path.join(self.root_dir, name)
if not os.path.exists(self.path): if not os.path.exists(self.path):
os.makedirs(self.path) os.makedirs(self.path)
if keep_copy: if keep_copy:
self.old_path = old_path self.old_path = old_path
if self.old_path is None: if self.old_path is None:
self.old_path = os.path.join(self.shepherd.root_dir, name + "_old") self.old_path = os.path.join(
self.root_dir, name + "_old")
if not os.path.exists(self.old_path): if not os.path.exists(self.old_path):
os.makedirs(self.old_path) os.makedirs(self.old_path)
@ -160,51 +136,99 @@ class Bucket():
def _check_files(self): def _check_files(self):
while True: while True:
# NOTE: The reason we use an event here, rather than a lock or condition
# is that we're not sharing any internal state between the threads - just
# the filesystem itself and using the atomicity of file operations. While
# less clean in a pure python sense, this allows for more flexibility in
# allowing other sources of files
self.newfile_event.wait(timeout=10) self.newfile_event.wait(timeout=10)
self.newfile_event.clear() self.newfile_event.clear()
bucket_files = [] bucket_files = []
for item in os.listdir(self.path): for item in os.listdir(self.path):
item_path = os.path.join(self.path, item) item_path = os.path.join(self.path, item)
if (os.path.isfile(item_path) and if (os.path.isfile(item_path) and
(not item.endswith(".writing")) and (not item.endswith(".writing")) and
(not item.endswith(".uploading")) and (not item.endswith(".uploading")) and
(not item.endswith(".uploaded"))): (not item.endswith(".uploaded"))):
bucket_files.append(item_path) bucket_files.append(item_path)
#TODO check for .uploaded files and either delete or
# if keep_copy, move to self.old_path
if bucket_files: if bucket_files:
self.destination.add_files_to_send(bucket_files) self.destination.add_files_to_send(bucket_files)
class UploaderModule(shepherd.module.Module): class UploaderPlugin(shepherd.plugin.Plugin):
conf_def = UploaderConfDef() @staticmethod
def define_config(confdef):
dests = confdef.add_def('destination', shconf.TableArrayDef())
dests.add_def('name', shconf.StringDef())
dests.add_def('protocol', shconf.StringDef())
dests.add_def('address', shconf.StringDef(optional=True))
dests.add_def('port', shconf.IntDef(optional=True))
dests.add_def('path', shconf.StringDef(optional=True))
dests.add_def('username', shconf.StringDef(optional=True))
dests.add_def('password', shconf.StringDef(optional=True))
dests.add_def('keyfile', shconf.StringDef(
default="", optional=True))
dests.add_def('add_id_to_path', shconf.BoolDef(
default=True, optional=True))
buckets = confdef.add_def('bucket', shconf.TableArrayDef())
buckets.add_def('name', shconf.StringDef())
buckets.add_def('open_link_on_new', shconf.BoolDef())
buckets.add_def('opportunistic', shconf.BoolDef(
default=True, optional=True))
buckets.add_def('keep_copy', shconf.BoolDef())
buckets.add_def('destination', shconf.StringDef())
def __init__(self, pluginInterface, config):
super().__init__(pluginInterface, config)
self.config = config
self.interface = pluginInterface
self.plugins = pluginInterface.other_plugins
self.hooks = pluginInterface.hooks
def __init__(self, config, core_interface): self.root_dir = os.path.expanduser(pluginInterface.coreconfig["root_dir"])
super().__init__(config, core_interface) self.id = pluginInterface.coreconfig["id"]
print("Uploader config:") print("Uploader config:")
print(self.config) print(self.config)
self.interface = UploaderInterface(self) self.interface.register_function(self.move_to_bucket)
self.destinations = {} self.destinations = {}
self.buckets = {} self.buckets = {}
for dest_conf in self.config["destination"]: for dest_conf in self.config["destination"]:
if dest_conf["protocol"] == "sftp": if dest_conf["protocol"] == "sftp":
self.destinations[dest_conf["name"]] = SFTPDestination(dest_conf, core_interface) self.destinations[dest_conf["name"]] = SFTPDestination(
dest_conf, self.id, self.root_dir)
else: else:
self.destinations[dest_conf["name"]] = Destination(dest_conf, core_interface) self.destinations[dest_conf["name"]] = Destination(
dest_conf, self.id, self.root_dir)
for bucketconf in self.config["bucket"]: for bucketconf in self.config["bucket"]:
bucketconf["destination"] = self.destinations[bucketconf["destination"]] bucketconf["destination"] = self.destinations[bucketconf["destination"]]
self.buckets[bucketconf["name"]] = Bucket( self.buckets[bucketconf["name"]] = Bucket(
**bucketconf, core_interface=self.shepherd) **bucketconf, node_id=self.id, root_dir=self.root_dir)
def move_to_bucket(self, filepath, bucket_name):
# use intermediary step with ".writing" on the filename
# in case the source isn't in the same filesystem and so the
# move operation might not be atomic. Once it's there, the rename
# _is_ atomic
dest_path = os.path.join(self.buckets[bucket_name].path,
os.path.basename(filepath))
temp_dest_path = dest_path + ".writing"
shutil.move(filepath, temp_dest_path)
os.rename(temp_dest_path, dest_path)
def init_other_modules(self, interfaces): # pylint: disable=W0235 # notify bucket to check for new files
super().init_other_modules(interfaces) self.buckets[bucket_name].newfile_event.set()
if __name__ == "__main__": if __name__ == "__main__":
pass pass
#print("main") # print("main")
#main(sys.argv[1:]) # main(sys.argv[1:])

@ -0,0 +1,325 @@
import io
import os
from datetime import datetime
import time
import re
import shepherd.config as shconf
import shepherd.plugin
import threading
import subprocess
from collections import namedtuple, OrderedDict
from operator import itemgetter
import cv2
from PIL import Image, ImageDraw, ImageFont
asset_dir = os.path.dirname(os.path.realpath(__file__))
overlayfont_filename = os.path.join(asset_dir, "DejaVuSansMono.ttf")
logo_filename = os.path.join(asset_dir, "smallshepherd.png")
# Note: Add a lock to the gstreamer function, to avoid multiple triggers colliding
# Pairing of a camera's USB topology path (e.g. "3.1.2", as reported by
# `v4l2-ctl --list-devices`) with its /dev/video* device node.
CameraPort = namedtuple(
    'CameraPort', ['usbPath', 'devicePath'])
# Short wrapper to allow use in a ``with`` context
class VideoCaptureCtx():
    """Context-manager wrapper around ``cv2.VideoCapture``.

    Guarantees the capture device is released when the ``with`` block
    exits, even if an exception is raised inside it.
    """

    def __init__(self, *cap_args, **cap_kwargs):
        # All arguments are forwarded untouched to cv2.VideoCapture.
        self.capture_dev = cv2.VideoCapture(*cap_args, **cap_kwargs)

    def __enter__(self):
        # Hand back the raw capture object, not this wrapper.
        return self.capture_dev

    def __exit__(self, *exc_info):
        self.capture_dev.release()
def get_connected_cameras():
    """Return a CameraPort for every V4L2 camera currently attached.

    Results are ordered by USB topology path, so the ordering is stable
    across reboots regardless of the order cameras were plugged in.
    """
    listing = subprocess.run(
        ['v4l2-ctl', '--list-devices'], text=True, stdout=subprocess.PIPE).stdout
    # Each match: group 1 = USB path (e.g. "3.1.2"), group 2 = device node
    # (e.g. "/dev/video0").
    pairs = re.findall(r"-([\d.]+?)\):\n\s*?(\/dev\S+?)\n", listing)
    return [CameraPort(usb_path, dev_path) for usb_path, dev_path in pairs]
def get_capture_formats(video_device):
    """
    Call ``v4l2-ctl --device {video_device} --list-formats-ext`` and parse the output into a format dict

    Returns a dict with 4CC format codes as keys, and lists of (width,height) tuples as values
    """
    raw = subprocess.run(
        ['v4l2-ctl', '--device', F'{video_device}', '--list-formats-ext'],
        text=True, stdout=subprocess.PIPE).stdout
    parts = re.split(r"\[\d\]: '(\w{4}).*", raw)
    if len(parts) < 3:
        raise Exception("Did not get valid device format list output")
    # parts[0] is preamble junk; after that the split alternates
    # fourcc, resolution-text, fourcc, resolution-text, ...
    return {
        fourcc: re.findall(r"(\d+?)x(\d+?)\D", size_text)
        for fourcc, size_text in zip(parts[1::2], parts[2::2])
    }
def get_largest_resolution(size_list):
    """
    Accepts a list of tuples where the first element is a width and the second is a height.
    Returns a single resolution tuple representing the largest area from the list
    """
    # Sizes come from get_capture_formats() as *string* tuples. The previous
    # key, int(size[0]*int(size[1])), therefore performed string repetition
    # ("640" * 480) instead of multiplication, ranking by digit count rather
    # than by area. Convert both components to int before multiplying.
    return max(size_list, key=lambda size: int(size[0]) * int(size[1]))
def set_camera_format_v4l2(video_device, fourcc, width, height):
    """
    Set the camera device capture format using the external v4l2-ctl tool
    """
    fmt_arg = F'--set-fmt-video width={width},height={height},pixelformat={fourcc}'
    subprocess.run(['v4l2-ctl', '--device', F'{video_device}', fmt_arg], text=True)
def set_camera_format_opencv(capture_device, fourcc, width, height):
    """
    Set the camera device capture format using internal OpenCV set methods
    """
    # VideoWriter_fourcc expects a list of characters, so need to unpack the string
    capture_device.set(cv2.CAP_PROP_FOURCC, cv2.VideoWriter_fourcc(*fourcc))
    # width/height may arrive as strings (parsed from v4l2-ctl output), so
    # coerce to int before handing them to OpenCV.
    capture_device.set(cv2.CAP_PROP_FRAME_WIDTH, int(width))
    capture_device.set(cv2.CAP_PROP_FRAME_HEIGHT, int(height))
class USBCamPlugin(shepherd.plugin.Plugin):
    """Shepherd plugin that captures still images from USB (V4L2) cameras.

    Cameras are matched to config entries by USB port path (with '*' acting
    as a wildcard for any remaining connected camera), captured via OpenCV
    at the largest available MJPG resolution, optionally stamped with a
    logo/ID/timestamp overlay, saved as JPEG, and optionally handed to the
    Uploader plugin for transfer.
    """

    @staticmethod
    def define_config(confdef):
        """Declare the [usbcam] configuration schema."""
        confdef.add_def('upload_images', shconf.BoolDef(default=False, optional=True,
                        helptext="If true, move to an Uploader bucket. Requires Uploader plugin"))
        confdef.add_def('upload_bucket', shconf.StringDef(default="", optional=True,
                        helptext="Name of uploader bucket to shift images to."))
        confdef.add_def('save_directory', shconf.StringDef(default="", optional=True,
                        helptext="Name of directory path to save images. If empty, a 'usbcamera' directory under the Shepherd root dir will be used"))
        confdef.add_def('append_id', shconf.BoolDef(default=True, optional=True,
                        helptext="If true, add the system ID to the end of image filenames"))
        confdef.add_def('show_overlay', shconf.BoolDef(default=True, optional=True,
                        helptext="If true, add an overlay on each image with the system ID and date."))
        confdef.add_def('overlay_desc', shconf.StringDef(default="", optional=True,
                        helptext="Text to add to the overlay after the system ID and camera name"))
        confdef.add_def('jpeg_quality', shconf.IntDef(default=85, minval=60, maxval=95, optional=True,
                        helptext="JPEG quality to save with. Max of 95, passed directly to Pillow"))
        confdef.add_def('stabilise_delay', shconf.IntDef(default=5, minval=1, maxval=30, optional=True,
                        helptext="Number of seconds to wait after starting each camera for exposure and white balance to settle"))

        array = confdef.add_def('trigger', shconf.TableArrayDef(
            helptext="Array of triggers that will use all cameras"))
        array.add_def('hour', shconf.StringDef())
        array.add_def('minute', shconf.StringDef())
        array.add_def('second', shconf.StringDef(default="0", optional=True))

        camarray = confdef.add_def('camera', shconf.TableArrayDef(
            helptext="List of cameras to try and connect to. Multiple ports may be listed, and any not connected will be skipped on each trigger."))
        camarray.add_def('name', shconf.StringDef(default="", optional=False,
                         helptext="Name of camera, appended to filename and added to overlay"))
        camarray.add_def('usb_port', shconf.StringDef(default="*", optional=False,
                         helptext="USB port descriptor of the form '3.4.1' (which would indicate port1 on a hub plugged into port4 on a hub plugged into port 3 of the system). This can be found by running 'v4l2-ctl --list-devices'. A single camera with a wildcard '*' port is also allowed, and will match any remaining available camera."))

    def __init__(self, pluginInterface, config):
        super().__init__(pluginInterface, config)
        self.config = config
        self.interface = pluginInterface
        self.plugins = pluginInterface.other_plugins
        self.hooks = pluginInterface.hooks
        self.root_dir = os.path.expanduser(pluginInterface.coreconfig["root_dir"])
        self.id = pluginInterface.coreconfig["id"]

        self.interface.register_hook("pre_cam")
        self.interface.register_hook("post_cam")
        self.interface.register_function(self.camera_job)

        # do some camera init stuff
        print("USBCamera config:")
        print(self.config)

        # Serializes access to the capture devices so concurrent triggers
        # can't fight over /dev/video*.
        self.gstlock = threading.Lock()

        # Was `is ""` (identity comparison with a literal) — use equality.
        if self.config["save_directory"] == "":
            self.save_directory = os.path.join(self.root_dir, "usbcamera")
        else:
            self.save_directory = self.config["save_directory"]
        # exist_ok avoids the check-then-create race of exists()+makedirs().
        os.makedirs(self.save_directory, exist_ok=True)

        if self.config["show_overlay"]:
            # Load assets; resized logos and fonts are cached per font size.
            self.logo_im = Image.open(logo_filename)
            self.font_size_cache = {}
            self.logo_size_cache = {}

        # Dict of camera names storing the USB path as the value
        self.defined_cams = OrderedDict()
        # List of wildcard camera names
        self.wildcard_cams = []
        # Go through camera configs sorted by name
        for camera in sorted(self.config["camera"], key=itemgetter("name")):
            if camera["name"] in self.defined_cams:
                raise shconf.InvalidConfigError(
                    "Can't have more than one usb camera defined with the same config name")
            if camera["usb_port"] == '*':
                self.wildcard_cams.append(camera["name"])
            else:
                self.defined_cams[camera["name"]] = camera["usb_port"]

        for trigger in self.config["trigger"]:
            trigger_id = trigger["hour"]+'-' + trigger["minute"]+'-'+trigger["second"]
            self.interface.add_job(
                self.camera_job, trigger["hour"], trigger["minute"], trigger["second"], job_name=trigger_id)

    def _generate_overlay(self, width, height, image_time, camera_name):
        """Build the translucent banner (logo, description, timestamp).

        Returns an RGBA image as wide as the photo and roughly height/40
        tall, ready to be pasted onto the bottom of the frame.
        """
        font_size = int(height/40)
        margin_size = int(font_size/5)

        # Fonts and resized logos are cached keyed by font size, since the
        # same camera resolutions recur on every trigger.
        if font_size not in self.font_size_cache:
            self.font_size_cache[font_size] = ImageFont.truetype(
                overlayfont_filename, int(font_size*0.9))
        thisfont = self.font_size_cache[font_size]

        if font_size not in self.logo_size_cache:
            newsize = (int(self.logo_im.width*(
                font_size/self.logo_im.height)), font_size)
            self.logo_size_cache[font_size] = self.logo_im.resize(
                newsize, Image.BILINEAR)
        thislogo = self.logo_size_cache[font_size]

        desc_text = camera_name + " " + self.config["overlay_desc"]
        if self.config["append_id"]:
            desc_text = self.id + " " + desc_text
        time_text = image_time.strftime("%Y-%m-%d %H:%M:%S")

        overlay = Image.new('RGBA', (width, font_size+(2*margin_size)), (0, 0, 0))
        overlay.paste(thislogo, (int((overlay.width-thislogo.width)/2), margin_size))
        draw = ImageDraw.Draw(overlay)
        draw.text((margin_size*2, margin_size), desc_text,
                  font=thisfont, fill=(255, 255, 255, 255))
        # Right-align the timestamp against the overlay's right margin.
        datewidth, _ = draw.textsize(time_text, thisfont)
        draw.text((overlay.width-(margin_size*2)-datewidth, margin_size), time_text, font=thisfont,
                  fill=(255, 255, 255, 255))
        # make whole overlay half transparent
        overlay.putalpha(128)
        return overlay

    def _process_image(self, cv_frame, camera_name):
        """Overlay, name, and save one captured frame; optionally upload.

        The JPEG is first written with a ".writing" suffix and then renamed
        so other consumers never see a partially-written file.
        """
        image_time = datetime.now()

        # Convert over to PIL. Mostly so we can use our own font.
        img = Image.fromarray(cv2.cvtColor(cv_frame, cv2.COLOR_BGR2RGB))

        if self.config["show_overlay"]:
            overlay = self._generate_overlay(img.width, img.height, image_time, camera_name)
            img.paste(overlay, (0, img.height-overlay.height), overlay)

        image_filename = image_time.strftime("%Y-%m-%d %H-%M-%S")
        if self.config["append_id"]:
            image_filename = image_filename + " " + self.id
        # Was `is not ""` (identity comparison with a literal) — use equality.
        if camera_name != "":
            image_filename = image_filename+" "+camera_name
        image_filename = image_filename + ".jpg"
        image_filename = os.path.join(self.save_directory, image_filename)

        img.save(image_filename+".writing", "JPEG", quality=self.config["jpeg_quality"])
        os.rename(image_filename+".writing", image_filename)

        if self.config["upload_images"]:
            self.plugins["uploader"].move_to_bucket(image_filename, self.config["upload_bucket"])

    def _capture_image(self, device_path, camera_name):
        """Capture a single frame from one camera device and process it."""
        print("Running camera "+camera_name)
        with self.gstlock:
            fmts = get_capture_formats(device_path)
            with VideoCaptureCtx(device_path, cv2.CAP_V4L2) as vidcap:
                if "MJPG" in fmts:
                    size = get_largest_resolution(fmts["MJPG"])
                    set_camera_format_opencv(vidcap, "MJPG", size[0], size[1])
                # stream only starts after first grab
                print("Starting cam")
                read_flag, frame = vidcap.read()
                # Keep grabbing (and discarding) frames for the configured
                # settle time so auto-exposure/white-balance can stabilise.
                delay_start = time.time()
                while (time.time() - delay_start) < self.config["stabilise_delay"]:
                    vidcap.grab()
                print("Reading")
                read_flag, frame = vidcap.read()
            if read_flag:
                self._process_image(frame, camera_name)
            else:
                print("Could not read camera "+camera_name +
                      " on USB port "+device_path)

    def camera_job(self):
        """Trigger entry point: run every configured camera that is present.

        Explicitly-addressed cameras are serviced first; remaining connected
        devices are then handed out to wildcard entries in order.
        """
        self.hooks.pre_cam()
        # Maps usbPath -> devicePath for everything currently attached.
        connected_cams = OrderedDict(get_connected_cameras())
        for defined_name, defined_usb_path in self.defined_cams.items():
            if defined_usb_path in connected_cams:
                self._capture_image(connected_cams.pop(
                    defined_usb_path), defined_name)
            else:
                print("USB Camera "+defined_name+" on port " +
                      defined_usb_path+" is not currently connected")
        for cam_name in self.wildcard_cams:
            if len(connected_cams) > 0:
                # popitem(last=False) = FIFO: assign in stable USB-path order.
                self._capture_image(connected_cams.popitem(
                    last=False)[1], cam_name)
            else:
                print(
                    "No connected USB cameras are currently left to match to "+cam_name+" ")
                break
        self.hooks.post_cam()
if __name__ == "__main__":
pass
# print("main")
# main(sys.argv[1:])

@ -0,0 +1,208 @@
#!/usr/bin/env python3
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.triggers.cron import CronTrigger
from apscheduler.events import *
from collections import namedtuple
from tzlocal import get_localzone
from datetime import datetime
from datetime import timezone
from datetime import timedelta
import time
import subprocess
import os
import shepherd.plugin
import io
def is_raspberry_pi(raise_on_errors=False):
    """Return True when /proc/cpuinfo identifies this host as a Raspberry Pi.

    Checks the 'Hardware' line against the known BCM chip identifiers.
    When *raise_on_errors* is true, failures raise ValueError instead of
    returning False.
    """
    known_chips = (
        'BCM2708',
        'BCM2709',
        'BCM2835',
        'BCM2836'
    )
    try:
        with io.open('/proc/cpuinfo', 'r') as cpuinfo:
            hardware_seen = False
            for line in cpuinfo:
                if not line.startswith('Hardware'):
                    continue
                hardware_seen = True
                _, value = line.strip().split(':', 1)
                if value.strip() not in known_chips:
                    if raise_on_errors:
                        raise ValueError(
                            'This system does not appear to be a '
                            'Raspberry Pi.'
                        )
                    return False
            if not hardware_seen:
                if raise_on_errors:
                    raise ValueError(
                        'Unable to determine if this system is a Raspberry Pi.'
                    )
                return False
    except IOError:
        # /proc/cpuinfo missing or unreadable (e.g. non-Linux host).
        if raise_on_errors:
            raise ValueError('Unable to open `/proc/cpuinfo`.')
        return False
    return True
# Module-level singletons: the APScheduler instance and the on-disk path of
# the persisted job list. Both are populated by init_scheduler().
_scheduler = None
joblist_path = None

# Serializable description of a scheduled job: `jobstring` is a
# "shepherd:<plugin>:<function>:<job name>" identifier; hour/minute/second
# are cron-style field strings.
JobDescription = namedtuple(
    'Job', ['jobstring', 'hour', 'minute', 'second'])
def init_scheduler(core_config):
    """Create the global scheduler and derive the job-list file path.

    Must be called before schedule_job()/save_jobs()/start().
    """
    global _scheduler
    global joblist_path
    _scheduler = BlockingScheduler()
    root = os.path.expanduser(core_config["root_dir"])
    joblist_path = os.path.join(root, "joblist.shp")
def schedule_job(job_desc):
    """Add or update a cron job on the global scheduler.

    *job_desc* must be a JobDescription whose jobstring has the form
    ``shepherd:<plugin>:<function>:<job name>``. If a job whose id starts
    with the same jobstring already exists, its trigger is updated in
    place; otherwise a new job is added.

    Raises Exception when the argument is not a JobDescription or the
    jobstring is not a valid Shepherd job identifier.
    """
    if not isinstance(job_desc, JobDescription):
        raise Exception(
            "Argument to schedule_job() must be an instance of shepherd.scheduler.JobDescription")
    global _scheduler
    # resolve callable
    splitstring = job_desc.jobstring.split(':')
    if not ((len(splitstring) == 4) and (splitstring[0] == "shepherd")):
        raise Exception(
            "Could not add job - jobstring is not a valid Shepherd job")
    job_plugin, job_function, job_name = splitstring[1:]
    func = getattr(shepherd.plugin.plugin_functions[job_plugin], job_function)
    # The trigger spec is appended to the job id after '!' so save_jobs()
    # can later recover both parts from the id alone.
    triggerstr = "!"+job_desc.hour+":"+job_desc.minute+":"+job_desc.second
    for job in _scheduler.get_jobs():
        # Jobstring must be the same, triggerstring can vary and still match
        if job.id.startswith(job_desc.jobstring):
            # Jobstring matches existing job, so update it.
            # next_run_time is a datetime: concatenating it directly to a
            # str (as before) raised TypeError, so wrap it in str().
            print("Modifying job "+job.id)
            print("next run was "+str(job.next_run_time))
            job.modify(func=func, trigger=CronTrigger(
                hour=job_desc.hour, minute=job_desc.minute, second=job_desc.second))
            print("next is now "+str(job.next_run_time))
            break
    else:
        # No matching job found, so new job
        print("Scheduling job "+job_desc.jobstring)
        _scheduler.add_job(func, id=job_desc.jobstring+triggerstr, coalesce=True,
                           misfire_grace_time=300, trigger=CronTrigger(
                               hour=job_desc.hour, minute=job_desc.minute, second=job_desc.second))
# Needs to be called after plugins are initialised, so interface functions
# are available
# Remember to wipe job store and not restore on next boot if config file was changed
def save_jobs():
    """Persist the ids of all scheduled jobs to the job-list file.

    Validates each job's id and trigger, writes the list atomically (to a
    ".writing" temp file, then rename), and returns the earliest next-run
    time across all jobs, or None when no jobs are scheduled.
    """
    saved_jobs = []
    next_job_time = None
    for job in _scheduler.get_jobs():
        jobstring, _, triggerstr = job.id.partition('!')
        jobstring_parts = jobstring.split(':')
        if not ((len(jobstring_parts) == 4) and (jobstring_parts[0] == "shepherd")):
            raise Exception(
                "Could not save scheduler job "+job.id+" - ID is not a valid Shepherd job")
        if not isinstance(job.trigger, CronTrigger):
            raise Exception("Could not save scheduler job " +
                            job.id+" - Trigger is not a CronTrigger")
        saved_jobs.append(job.id)
        # Track the soonest upcoming run across all jobs.
        if next_job_time is None or job.next_run_time < next_job_time:
            next_job_time = job.next_run_time
    with open(joblist_path+".writing", 'w+') as f:
        for saved_job in saved_jobs:
            f.write("%s\n" % saved_job)
    # Atomic swap so readers never observe a half-written list.
    os.rename(joblist_path+".writing", joblist_path)
    return next_job_time
# Currently the wakeup functionality is based on a hard-coded dependency on the Scout
# plugin. We'd like this to instead be independent, and provide hooks for modules to register with that provide this.
# A problem with just providing a "set_alarm" hook or something is that we really need to be able to
# confirm that an alarm has been set and we _are_ going to wake up again correctly.
# Could potentially provide an interface function that others can call to set a "Next Alarm" state variable. This can then be
# checked after the hook call to verify.
# At the moment shutdown is just triggered 1 minute after setting the alarm - mostly just to allow time for things to upload.
# Having things be properly event based instead would be better - adding random delays all around the place inevitably leads to
# "delay creep" - where all the "just in case" delays just build up and up and wind up making the whole thing take ages to do anything.
# Instead we really need some sort of "current jobs/dependencies" queue thing - containing active stuff to be dealt with that should
# hold the system active (sidenote - this same system could easily then be used to hold things on to access them remotely)
# Would ideally have a mechanism that makes it hard to accidentally leave an item/job in the list that stops the system shutting down -
# maybe look at a context manager used on an object in each plugin?
def _jobs_changed(event):
    # Scheduler event listener: persist the job list and, when the next job
    # is far enough away, arm a hardware wakeup alarm via the Scout plugin
    # and power the system down to save energy.
    next_job_time = save_jobs()
    # default to idle for 5 mins
    early_wakeup_period = timedelta(minutes=1)
    now_datetime = datetime.now(get_localzone())
    next_idle_period = timedelta(minutes=5)
    if next_job_time is not None:
        next_idle_period = next_job_time - now_datetime
        # Cap the sleep so the node checks in at least every 6 hours.
        if next_idle_period > timedelta(hours=6):
            next_idle_period = timedelta(hours=6)
    wakeup_time = None
    # Only worth shutting down when idle for more than 4 minutes; wake up
    # early_wakeup_period before the next job is due.
    if next_idle_period > timedelta(minutes=4):
        wakeup_time = now_datetime+(next_idle_period-early_wakeup_period)
    if wakeup_time is not None:
        # Scout expects the alarm as a unix-epoch-seconds string.
        alarm_str = str(int(wakeup_time.timestamp()))
        print("waking up at "+ str(wakeup_time) + " ("+alarm_str+")")
        retval=shepherd.plugin.plugin_functions["scout"].set_alarm(alarm_str)
        if retval is not None:
            print(retval)
            if retval == alarm_str:
                # Alarm confirmed — only actually power off on real Pi hardware.
                if is_raspberry_pi():
                    print("Shutting down in 1 minute")
                    # Grace period so in-flight work (e.g. uploads) can finish.
                    time.sleep(60)
                    subprocess.run(["shutdown","now"])
            else:
                print("Alarm set response was incorrect")
        else:
            print("Did not get reply from Scout after setting alarm")
def restore_jobs():
    """Reload previously saved jobs from the job-list file.

    Placeholder — intentionally does nothing yet; see save_jobs() for the
    file format it would need to read back.
    """
    pass
def start():
    """Attach the persistence/wakeup listener and run the scheduler.

    BlockingScheduler.start() does not return until the scheduler is
    shut down.
    """
    global _scheduler
    # _jobs_changed re-saves the job list (and may arm a wakeup alarm and
    # shut down) on any change to the job set, every run/misfire, and start.
    _scheduler.add_listener(_jobs_changed, EVENT_JOB_ADDED | EVENT_JOB_REMOVED | EVENT_JOB_MODIFIED | EVENT_JOB_EXECUTED | EVENT_JOB_MISSED | EVENT_SCHEDULER_STARTED)
    _scheduler.start()

Binary file not shown.

After

Width:  |  Height:  |  Size: 12 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 954 KiB

@ -0,0 +1,24 @@
[shepherd]
plugin_dir = "~/shepherd/"
plugins = ["scout"]
root_dir = "~/shepherd/"
conf_edit_path = "~/shepherd.toml"
id = "testnode"
[scout]
boardver = "3"
serialport = "/dev/ttyUSB0"
[usbcam]
[[usbcam.camera]]
name = "USB1"
usb_port = "*"
[[usbcam.camera]]
name = "USB2"
usb_port = "*"
[[usbcam.camera]]
name = "USB3"
usb_port = "3.1.2.1"
[[usbcam.trigger]]
hour = "*"
minute = "*/10"
second = "1"
Loading…
Cancel
Save