diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..d213889 --- /dev/null +++ b/.gitignore @@ -0,0 +1,718 @@ + +# Created by https://www.toptal.com/developers/gitignore/api/python,windows,macos,linux,pycharm,visualstudio,visualstudiocode +# Edit at https://www.toptal.com/developers/gitignore?templates=python,windows,macos,linux,pycharm,visualstudio,visualstudiocode + +### Linux ### +*~ + +# temporary files which can be created if a process still has a handle open of a deleted file +.fuse_hidden* + +# KDE directory preferences +.directory + +# Linux trash folder which might appear on any partition or disk +.Trash-* + +# .nfs files are created when an open file is removed but is still being accessed +.nfs* + +### macOS ### +# General +.DS_Store +.AppleDouble +.LSOverride + +# Icon must end with two \r +Icon + + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +### PyCharm ### +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider +# Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 + +# User-specific stuff +.idea/**/workspace.xml +.idea/**/tasks.xml +.idea/**/usage.statistics.xml +.idea/**/dictionaries +.idea/**/shelf + +# AWS User-specific +.idea/**/aws.xml + +# Generated files +.idea/**/contentModel.xml + +# Sensitive or high-churn files +.idea/**/dataSources/ +.idea/**/dataSources.ids +.idea/**/dataSources.local.xml +.idea/**/sqlDataSources.xml +.idea/**/dynamic.xml +.idea/**/uiDesigner.xml +.idea/**/dbnavigator.xml + +# Gradle +.idea/**/gradle.xml +.idea/**/libraries + +# Gradle and Maven with auto-import +# When using Gradle or Maven with auto-import, you should 
exclude module files, +# since they will be recreated, and may cause churn. Uncomment if using +# auto-import. +# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml +# .idea/modules.xml +# .idea/*.iml +# .idea/modules +# *.iml +# *.ipr + +# CMake +cmake-build-*/ + +# Mongo Explorer plugin +.idea/**/mongoSettings.xml + +# File-based project format +*.iws + +# IntelliJ +out/ + +# mpeltonen/sbt-idea plugin +.idea_modules/ + +# JIRA plugin +atlassian-ide-plugin.xml + +# Cursive Clojure plugin +.idea/replstate.xml + +# Crashlytics plugin (for Android Studio and IntelliJ) +com_crashlytics_export_strings.xml +crashlytics.properties +crashlytics-build.properties +fabric.properties + +# Editor-based Rest Client +.idea/httpRequests + +# Android studio 3.1+ serialized cache file +.idea/caches/build_file_checksums.ser + +### PyCharm Patch ### +# Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 + +# *.iml +# modules.xml +# .idea/misc.xml +# *.ipr + +# Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint +.idea/**/sonarlint/ + +# SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin +.idea/**/sonarIssues.xml + +# Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced +.idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml +.idea/**/markdown-navigator/ + +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +### Python ### +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files 
are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +### VisualStudioCode ### +.vscode/* +!.vscode/settings.json +!.vscode/tasks.json +!.vscode/launch.json +!.vscode/extensions.json +*.code-workspace + +# Local History for Visual Studio Code +.history/ + +### VisualStudioCode Patch ### +# Ignore all local history of files +.history +.ionide + +# Support for Project snippet scope +!.vscode/*.code-snippets + +### Windows ### +# Windows thumbnail cache files +Thumbs.db +Thumbs.db:encryptable +ehthumbs.db +ehthumbs_vista.db + +# Dump file +*.stackdump + +# Folder config file +[Dd]esktop.ini + +# Recycle Bin used on file shares +$RECYCLE.BIN/ + +# Windows Installer files +*.cab +*.msi +*.msix +*.msm +*.msp + +# Windows shortcuts +*.lnk + +### VisualStudio ### +## Ignore Visual Studio temporary files, build results, and +## files generated by popular Visual Studio add-ons. 
+## +## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore + +# User-specific files +*.rsuser +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Mono auto generated files +mono_crash.* + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +[Ww][Ii][Nn]32/ +[Aa][Rr][Mm]/ +[Aa][Rr][Mm]64/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ +[Ll]ogs/ + +# Visual Studio 2015/2017 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# Visual Studio 2017 auto generated files +Generated\ Files/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUnit +*.VisualState.xml +TestResult.xml +nunit-*.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# Benchmark Results +BenchmarkDotNet.Artifacts/ + +# .NET Core +project.lock.json +project.fragment.lock.json +artifacts/ + +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + +# StyleCop +StyleCopReport.xml + +# Files built by Visual Studio +*_i.c +*_p.c +*_h.h +*.ilk +*.meta +*.obj +*.iobj +*.pch +*.pdb +*.ipdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*_wpftmp.csproj +*.tlog +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# Visual Studio Trace Files +*.e2e + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# AxoCover is a Code Coverage Tool +.axoCover/* +!.axoCover/settings.json + +# Coverlet is a free, cross platform Code Coverage Tool 
+coverage*.json +coverage*.xml +coverage*.info + +# Visual Studio code coverage results +*.coverage +*.coveragexml + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# Note: Comment the next line if you want to checkin your web deploy settings, +# but database connection strings (with potential passwords) will be unencrypted +*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# NuGet Symbol Packages +*.snupkg +# The packages folder can be ignored because of Package Restore +**/[Pp]ackages/* +# except build/, which is used as an MSBuild target. 
+!**/[Pp]ackages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/[Pp]ackages/repositories.config +# NuGet v3's project.json files produces more ignorable files +*.nuget.props +*.nuget.targets + +# Nuget personal access tokens and Credentials +# nuget.config + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt +*.appx +*.appxbundle +*.appxupload + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!?*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +orleans.codegen.cs + +# Including strong name files can present a security risk +# (https://github.com/github/gitignore/pull/2483#issue-259490424) +#*.snk + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm +ServiceFabricBackup/ +*.rptproj.bak + +# SQL Server files +*.mdf +*.ldf +*.ndf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings +*.rptproj.rsuser +*- [Bb]ackup.rdl +*- [Bb]ackup ([0-9]).rdl +*- [Bb]ackup ([0-9][0-9]).rdl + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat +node_modules/ + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio 6 auto-generated workspace file (contains which files were open etc.) +*.vbw + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# CodeRush personal settings +.cr/personal + +# Python Tools for Visual Studio (PTVS) +*.pyc + +# Cake - Uncomment if you are using it +# tools/** +# !tools/packages.config + +# Tabs Studio +*.tss + +# Telerik's JustMock configuration file +*.jmconfig + +# BizTalk build output +*.btp.cs +*.btm.cs +*.odx.cs +*.xsd.cs + +# OpenCover UI analysis results +OpenCover/ + +# Azure Stream Analytics local run output +ASALocalRun/ + +# MSBuild Binary and Structured Log +*.binlog + +# NVidia Nsight GPU debugger configuration file +*.nvuser + +# MFractors (Xamarin productivity tool) working folder +.mfractor/ + +# Local History for Visual Studio +.localhistory/ + +# BeatPulse healthcheck temp database +healthchecksdb + +# Backup folder for Package Reference Convert tool in Visual Studio 2017 +MigrationBackup/ + +# Ionide (cross platform F# VS Code tools) working folder +.ionide/ + +# Fody - auto-generated XML schema +FodyWeavers.xsd + +# VS Code 
files for those working on multiple tools + +# Local History for Visual Studio Code + +# Windows Installer files from build outputs + +# JetBrains Rider +.idea/ +*.sln.iml + +### VisualStudio Patch ### +# Additional files built by Visual Studio + +# End of https://www.toptal.com/developers/gitignore/api/python,windows,macos,linux,pycharm,visualstudio,visualstudiocode \ No newline at end of file diff --git a/AnimationCSVFormat.md b/AnimationCSVFormat.md new file mode 100644 index 0000000..e9f16e0 --- /dev/null +++ b/AnimationCSVFormat.md @@ -0,0 +1,21 @@ +# Animation File Format + +The animation files are CSV (comma separated value) spreadsheet files. + +The first row contains the column names which are used to identify the contents of the column. +Every subsequent row contains the data for each frame. + +The header names are: + +`FRAME_ID` - This stores the index of the frame. +This column should contain integers. +Lowest values will be displayed first. +This column is optional. If undefined, the frames will be displayed in the order they are in the CSV file. + +`FRAME_TIME` - The amount of time the frame will remain for in milliseconds. +This should contain ints or floats, e.g. a value of 33.33 is 33.33ms or 1/30th of a second. +This column is optional. If undefined, it will default to 0 and the animation will run as fast as the hardware will allow. + +`[RGB]_[0-9]+` - The intensity of each colour channel for the given LED index. +Examples are `R_0`, `G_0` and `B_0` which are the red, green and blue channels for LED 0. +The values of these columns should be floats or ints between 0 and 255 inclusive. diff --git a/README.md b/README.md index 58715c5..e827840 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,10 @@ This repository contains the code and coordinates used for Matt's 2021 Christmas Code in the `examples` folder has been provided by other contributors!
+The format of the CSV animation files can be found in [AnimationCSVFormat.md](AnimationCSVFormat.md) + +The code to run the animation CSV files can be found in [execution](execution) + Most of what you need is probably over on the Harvard Graduate School of Design repository: ["GSD-6338: Introduction to Computational Design"](https://github.com/GSD6338) ## Usage diff --git a/execution/flush.py b/execution/flush.py new file mode 100644 index 0000000..1346bde --- /dev/null +++ b/execution/flush.py @@ -0,0 +1,18 @@ +# Based on code from https://github.com/standupmaths/xmastree2020 + +import board +import neopixel + + +def main(): + number_of_leds = 500 + pixels = neopixel.NeoPixel(board.D18, number_of_leds) + + for led in range(number_of_leds): + pixels[led] = (0, 0, 0) + + print("Done") + + +if __name__ == "__main__": + main() diff --git a/execution/run-folder.py b/execution/run-folder.py new file mode 100644 index 0000000..f8316c8 --- /dev/null +++ b/execution/run-folder.py @@ -0,0 +1,110 @@ +# Based on code from https://github.com/standupmaths/xmastree2020 +# Modified heavily by gentlegiantJGC + +from typing import List +import os +import argparse + +import board +import neopixel + +from run_utils import parse_animation_csv, draw_frames, draw_lerp_frames, Sequence + +NUMBER_OF_LEDS = 500 + + +def run_folder(folder_path: str, loops_per_sequence: int, transition_frames: int): + print(f"Sequences will loop {loops_per_sequence} times") + print(f"Sequences will blend over {transition_frames} frames") + + print("Loading animation spreadsheets. 
This may take a while.") + + # Load and parse all the sequences at the beginning (it's a heavy process for the pi) + csv_files: List[str] = [] + sequences: List[Sequence] = [] + for file_name in os.listdir(folder_path): + full_path = os.path.join(folder_path, file_name) + if file_name.endswith(".csv") and os.path.isfile(full_path): + try: + # try loading the spreadsheet and report any errors + sequence = parse_animation_csv(full_path, NUMBER_OF_LEDS, "RGB") + except Exception as e: + print(f"Failed loading spreadsheet {file_name}.\n{e}") + else: + # if the spreadsheet successfully loaded then add it to the data + sequences.append(sequence) + csv_files.append(file_name) + + print("Finished loading animation spreadsheets.") + + # Init the neopixel + pixels = neopixel.NeoPixel( + board.D18, NUMBER_OF_LEDS, auto_write=False, pixel_order=neopixel.RGB + ) + + last_frame = None + + # Play all sequences in a loop + while True: + # iterate over the sequences + for sequence_id, (file_name, (frames, frame_times)) in enumerate( + zip(csv_files, sequences) + ): + print(f"Playing file {file_name}") + for loop in range(0, loops_per_sequence): + # run this bit as many as was requested + if ( + last_frame is not None + and frames + and any( + # if any of the colour channels are greater than 20 points different then lerp between them. 
+ abs(channel_b - channel_a) > 20 + for led_a, led_b in zip(last_frame, frames[0]) + for channel_a, channel_b in zip(led_a, led_b) + ) + ): + # if an animation has played and the last and first frames are different enough + # then interpolate from the last state to the first state + # Some animations may be designed to loop so adding a fade will look weird + draw_lerp_frames(pixels, last_frame, frames[0], transition_frames) + + print(f"Loop {loop + 1} of {loops_per_sequence}") + + # push all the frames to the tree + draw_frames(pixels, frames, frame_times) + + # Store the last frame if it exists + if frames: + last_frame = frames[-1] + + +def main(): + # parser to parse the command line inputs + parser = argparse.ArgumentParser(description="Run all spreadsheet in a directory.") + parser.add_argument( + "csv_directory", + metavar="csv-directory", + type=str, + help="The absolute or relative path to a directory containing csv files.", + ) + parser.add_argument( + "loops_per_sequence", + type=int, + nargs="?", + default=5, + help="The number of times each sequence loops. Default is 5.", + ) + parser.add_argument( + "transition_frames", + type=int, + nargs="?", + default=15, + help="The number of frames (at 30fps) over which to transition between sequences. 
" + "Set to 0 to disable interpolation.", + ) + args, _ = parser.parse_known_args() + run_folder(args.csv_directory, args.loops_per_sequence, args.transition_frames) + + +if __name__ == "__main__": + main() diff --git a/execution/run.py b/execution/run.py new file mode 100644 index 0000000..49dc9bc --- /dev/null +++ b/execution/run.py @@ -0,0 +1,44 @@ +# Based on code from https://github.com/standupmaths/xmastree2020 +# Modified heavily by gentlegiantJGC + +import argparse + +import board +import neopixel + +from run_utils import parse_animation_csv, draw_frames + +# change if your setup has a different number of LEDs +NUMBER_OF_LEDS = 500 + + +def load_and_run_csv(csv_path): + frames, frame_times = parse_animation_csv(csv_path, NUMBER_OF_LEDS, "RGB") + print("Finished Parsing") + + pixels = neopixel.NeoPixel( + board.D18, NUMBER_OF_LEDS, auto_write=False, pixel_order=neopixel.RGB + ) + + # run the code on the tree + while True: + draw_frames(pixels, frames, frame_times) + + +def main(): + # parser to parse the command line inputs + parser = argparse.ArgumentParser(description="Run a single spreadsheet on loop.") + parser.add_argument( + "csv_path", + metavar="csv-path", + type=str, + help="The absolute or relative path to the csv file.", + ) + + args, _ = parser.parse_known_args() + csv_path = args.csv_path + load_and_run_csv(csv_path) + + +if __name__ == "__main__": + main() diff --git a/execution/run_utils.py b/execution/run_utils.py new file mode 100644 index 0000000..796aed8 --- /dev/null +++ b/execution/run_utils.py @@ -0,0 +1,150 @@ +# Written by gentlegiantJGC + +from typing import Tuple, List, Optional, NamedTuple +import csv +import time + +import neopixel + +Color = Tuple[float, float, float] +Frame = List[Color] +Frames = List[Frame] +FrameTime = float +FrameTimes = List[FrameTime] +Sequence = NamedTuple("Sequence", [("frames", Frames), ("frame_times", FrameTimes)]) + + +def parse_animation_csv( + csv_path: str, number_of_leds: int, channel_order="RGB" 
+) -> Sequence: + """ + Parse a CSV animation file into python objects. + + :param csv_path: The path to the csv animation file + :param number_of_leds: The number of LEDs that the device supports + :param channel_order: The order the channels should be loaded. Must be "RGB" or "GRB" + :return: A Sequence namedtuple containing frame data and frame times + """ + if channel_order not in ("RGB", "GRB"): + raise ValueError(f"Unsupported channel order {channel_order}") + # parse the CSV file + # The example files in this repository start with \xEF\xBB\xBF See UTF-8 BOM + # If read normally these become part of the first header name + # utf-8-sig reads this correctly and also handles the case when they don't exist + with open(csv_path, "r", encoding="utf-8-sig") as csv_file: + # pass the file object to reader() to get the reader object + csv_reader = csv.reader(csv_file) + + # this is a list of strings containing the column names + header = next(csv_reader) + + # read in the remaining data + data = list(csv_reader) + + # create a dictionary mapping the header name to the index of the header + header_indexes = dict(zip(header, range(len(header)))) + + # find the column numbers of each required header + # we should not assume that the columns are in a known order. Isn't that the point of column names? + # If a column does not exist it is set to None which is handled at the bottom and populates the column with 0.0 + led_columns: List[Tuple[Optional[int], Optional[int], Optional[int]]] = [ + tuple( + header_indexes.pop(f"{channel}_{led_index}", None) + for channel in channel_order + ) + for led_index in range(number_of_leds) + ] + + if "FRAME_ID" in header_indexes: + # get the frame id column index + frame_id_column = header_indexes.pop("FRAME_ID") + # don't assume that the frames are in chronological order. Isn't that the point of storing the frame index? 
+ # sort the frames by the frame index + data = sorted(data, key=lambda frame_data: int(frame_data[frame_id_column])) + # There may be a case where a frame is missed eg 1, 2, 4, 5, ... + # Should we duplicate frame 2 in this case? + # For now it can go straight from frame 2 to 4 + + if "FRAME_TIME" in header_indexes: + # Add the ability for the CSV file to specify how long the frame should remain for + # This will allow users to customise the frame rate and even have variable frame rates + # Note that frame rate is hardware limited because the method that pushes changes to the tree takes a while. + frame_time_column = header_indexes.pop("FRAME_TIME") + frame_times = [float(frame_data[frame_time_column])/1000 for frame_data in data] + else: + # if the frame time column is not defined then run as fast as possible like the old code. + frame_times = [0] * len(data) + + frames = [ + [ + tuple( + # Get the LED value or populate with 0.0 if the column did not exist + 0.0 if channel is None else float(frame_data[channel]) + # for each channel in the LED + for channel in channels + ) + # for each LED in the chain + for channels in led_columns + ] + # for each frame in the data + for frame_data in data + ] + return Sequence(frames, frame_times) + + +def draw_frame(pixels: neopixel.NeoPixel, frame: Frame, frame_time: float): + """ + Draw a single frame and wait to make up the frame time if required. + + :param pixels: The neopixel interface + :param frame: The frame to draw + :param frame_time: The time this frame should remain on the device + """ + t = time.perf_counter() + for led in range(pixels.n): + pixels[led] = frame[led] + pixels.show() + end_time = t + frame_time + while time.perf_counter() < end_time: + time.sleep(0) + + +def draw_frames(pixels: neopixel.NeoPixel, frames: Frames, frame_times: FrameTimes): + """ + Draw a series of frames to the tree. 
+ + :param pixels: The neopixel interface + :param frames: The frames to draw + :param frame_times: The frame time for each frame + """ + for frame, frame_time in zip(frames, frame_times): + draw_frame(pixels, frame, frame_time) + + +def draw_lerp_frames( + pixels: neopixel.NeoPixel, + last_frame: Frame, + next_frame: Frame, + transition_frames: int, +): + """ + Interpolate between two frames and draw the result. + + :param pixels: The neopixel interface + :param last_frame: The start frame + :param next_frame: The end frame + :param transition_frames: The number of frames to take to fade + """ + for frame_index in range(1, transition_frames): + ratio = frame_index / transition_frames + draw_frame( + pixels, + [ + tuple( + round((1 - ratio) * channel_a + ratio * channel_b) + for channel_a, channel_b in zip(led_a, led_b) + ) + for led, (led_a, led_b) in enumerate(zip(last_frame, next_frame)) + ], + 1 / 30, + ) diff --git a/execution/turnon.py b/execution/turnon.py new file mode 100644 index 0000000..e1b6535 --- /dev/null +++ b/execution/turnon.py @@ -0,0 +1,23 @@ +# Based on code from https://github.com/standupmaths/xmastree2020 + +import time +import sys + +import board +import neopixel + + +def main(): + number_of_leds = 500 + pixels = neopixel.NeoPixel(board.D18, number_of_leds) + + ids = sys.argv[1:] + for led in ids: + pixels[int(led)] = (255, 255, 255) + time.sleep(1) + + print("Done") + + +if __name__ == "__main__": + main()