diff --git a/.github/workflows/jekyll.yml b/.github/workflows/jekyll.yml
index 14fde072..5c9bd510 100644
--- a/.github/workflows/jekyll.yml
+++ b/.github/workflows/jekyll.yml
@@ -107,7 +107,7 @@ jobs:
           echo "$SSH_PRIVATE_KEY" > $SSH_KEY_PATH
           sudo chmod 600 $SSH_KEY_PATH
           echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts
-          wget https://raw.githubusercontent.com/UCSD-E4E/website2.0/main/_deploy_e4e-dev.sh
-          scp -i $SSH_KEY_PATH _deploy_e4e-dev.sh deploy@kastner-ml.dynamic.ucsd.edu:/tmp/deploy_e4e-dev.sh
-          ssh -i $SSH_KEY_PATH deploy@kastner-ml.dynamic.ucsd.edu '/bin/bash /tmp/deploy_e4e-dev.sh'
+          wget https://raw.githubusercontent.com/UCSD-E4E/website2.0/main/_deploy_e4e.sh
+          scp -i $SSH_KEY_PATH _deploy_e4e.sh deploy@kastner-ml.dynamic.ucsd.edu:/tmp/deploy_e4e.sh
+          ssh -i $SSH_KEY_PATH deploy@kastner-ml.dynamic.ucsd.edu '/bin/bash /tmp/deploy_e4e.sh'

diff --git a/.github/workflows/pr_initial_deploy.yml b/.github/workflows/pr_initial_deploy.yml
new file mode 100644
index 00000000..18b1c1f4
--- /dev/null
+++ b/.github/workflows/pr_initial_deploy.yml
@@ -0,0 +1,28 @@
+name: Deploy Jekyll PR to E4E-DEV
+
+on:
+  pull_request:
+    types:
+      - opened
+      - edited
+    branches:
+      - main
+  workflow_dispatch:
+
+jobs:
+  deploy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Execute Deploy
+        env:
+          SSH_PRIVATE_KEY: ${{secrets.KASTNER_ML_DEPLOY_SSH}}
+          SSH_KNOWN_HOSTS: ${{secrets.KASTNER_ML_KNOWN_HOSTS}}
+          SSH_KEY_PATH: ${{github.workspace}}/../private.key
+          PR_NUMBER: ${{github.event.number}}
+        run: |
+          mkdir -p ~/.ssh/
+          echo "$SSH_PRIVATE_KEY" > $SSH_KEY_PATH
+          chmod 600 $SSH_KEY_PATH
+          echo "$SSH_KNOWN_HOSTS" > ~/.ssh/known_hosts
+          # ssh -i $SSH_KEY_PATH deploy@kastner-ml.dynamic.ucsd.edu '/bin/bash /tmp/deploy_e4e.sh'
+
\ No newline at end of file
diff --git a/_bibliography/onboarding_papers/acoustic_species_id.bib b/_bibliography/onboarding_papers/acoustic_species_id.bib
index a1827293..43082820 100644
--- a/_bibliography/onboarding_papers/acoustic_species_id.bib
+++ b/_bibliography/onboarding_papers/acoustic_species_id.bib
@@ -5,7 +5,7 @@ @InProceedings{Ayers2021
   year = {2021},
   month = {July},
   volume = {38},
-  abstract = {The acoustic signature of a natural soundscape can reveal consequences of climate change on biodiversity. Hardware costs, human labor time, and expertise dedicated to labeling audio are impediments to conducting acoustic surveys across a representative portion of an ecosystem. These barriers are quickly eroding away with the advent of low-cost, easy to use, open source hardware and the expansion of the machine learning field providing pre-trained neural networks to test on retrieved acoustic data. One consistent challenge in passive acoustic monitoring (PAM) is a lack of reliability from neural networks on audio recordings collected in the field that contain crucial biodiversity information that otherwise show promising results from publicly available training and test sets. To demonstrate this challenge, we tested a hybrid recurrent neural network (RNN) and convolutional neural network (CNN) binary classifier trained for bird presence/absence on two Peruvian bird audiosets. The RNN achieved an area under the receiver operating characteristics (AUROC) of 95% on a dataset collected from Xeno-canto and Google’s AudioSet ontology in contrast to 65% across a stratified random sample of field recordings collected from the Madre de Dios region of the Peruvian Amazon. In an attempt to alleviate this discrepancy, we applied various audio data augmentation techniques in the network’s training process which led to an AUROC of 77% across the field recordings},
+  abstract = {The acoustic signature of a natural soundscape can reveal consequences of climate change on biodiversity. Hardware costs, human labor time, and expertise dedicated to labeling audio are impediments to conducting acoustic surveys across a representative portion of an ecosystem. These barriers are quickly eroding away with the advent of low-cost, easy to use, open source hardware and the expansion of the machine learning field providing pre-trained neural networks to test on retrieved acoustic data. One consistent challenge in passive acoustic monitoring (PAM) is a lack of reliability from neural networks on audio recordings collected in the field that contain crucial biodiversity information that otherwise show promising results from publicly available training and test sets. To demonstrate this challenge, we tested a hybrid recurrent neural network (RNN) and convolutional neural network (CNN) binary classifier trained for bird presence/absence on two Peruvian bird audiosets. The RNN achieved an area under the receiver operating characteristics (AUROC) of 95% on a dataset collected from Xeno-canto and Google's AudioSet ontology in contrast to 65% across a stratified random sample of field recordings collected from the Madre de Dios region of the Peruvian Amazon. In an attempt to alleviate this discrepancy, we applied various audio data augmentation techniques in the network's training process which led to an AUROC of 77% across the field recordings},
   url = {https://www.climatechange.ai/papers/icml2021/14},
 }

diff --git a/_bibliography/onboarding_papers/fishsense.bib b/_bibliography/onboarding_papers/fishsense.bib
index 8aae2aff..83846f93 100644
--- a/_bibliography/onboarding_papers/fishsense.bib
+++ b/_bibliography/onboarding_papers/fishsense.bib
@@ -5,7 +5,7 @@ @INPROCEEDINGS{tueller_maddukuri_paxson_et_al_oceans_2021
   year = {2021},
   month={September},
   publisher={IEEE},
-  abstract={There is a need for reliable underwater fish monitoring systems that can provide oceanographers and researchers with valuable data about life underwater. Most current methods rely heavily on human observation which is both error prone and costly. FishSense provides a solution that accelerates the use of depth cameras underwater, opening the door to 3D underwater imaging that is fast, accurate, cost effective, and energy efficient. FishSense is a sleek handheld underwater imaging device that captures both depth and color images. This data has been used to calculate the length of fish, which can be used to derive biomass and health. The FishSense platform has been tested through two separate deployments. The first deployment imaged a toy fish of known length and volume within a controlled testing pool. The second deployment was conducted within an 70,000 gallon aquarium tank with multiple species of fish. A Receiver Operating Characteristic (ROC) curve has been computed based on the detector’s performance across all images, and the mean and standard deviation of the length measurements of the detections has been computed.},
+  abstract={There is a need for reliable underwater fish monitoring systems that can provide oceanographers and researchers with valuable data about life underwater. Most current methods rely heavily on human observation which is both error prone and costly. FishSense provides a solution that accelerates the use of depth cameras underwater, opening the door to 3D underwater imaging that is fast, accurate, cost effective, and energy efficient. FishSense is a sleek handheld underwater imaging device that captures both depth and color images. This data has been used to calculate the length of fish, which can be used to derive biomass and health. The FishSense platform has been tested through two separate deployments. The first deployment imaged a toy fish of known length and volume within a controlled testing pool. The second deployment was conducted within an 70,000 gallon aquarium tank with multiple species of fish. A Receiver Operating Characteristic (ROC) curve has been computed based on the detector's performance across all images, and the mean and standard deviation of the length measurements of the detections has been computed.},
   url={https://agu.confex.com/agu/OVS21/meetingapp.cgi/Paper/787405}}
 @ARTICLE{wong_humphrey_switzer_wuwnet_2022,
   author = {Wong, Emily and Humphrey, Isabella and Switzer, Scott and Crutchfield, Christopher and Hui, Nathan and Schurgers, Curt and Kastner, Ryan},
diff --git a/_bibliography/onboarding_papers/radio_telemetry_tracking.bib b/_bibliography/onboarding_papers/radio_telemetry_tracking.bib
index f7919910..5a16ad81 100644
--- a/_bibliography/onboarding_papers/radio_telemetry_tracking.bib
+++ b/_bibliography/onboarding_papers/radio_telemetry_tracking.bib
@@ -3,7 +3,7 @@ @Article{Hui2021
   journal = {Journal of Field Robotics},
   title = {A more precise way to localize animals using drones},
   year = {2021},
-  abstract = {Abstract Radio telemetry is a commonly used technique in conservation biology and ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae and either direction-finding and homing techniques or radio-triangulation techniques. Over the past couple of decades, efforts have been made to utilize unmanned aerial vehicles to make radio-telemetry tracking more efficient, or cover more area. However, many of these approaches are complex and have not been rigorously field-tested. To provide scientists with reliable quality tracking data, tracking systems need to be rigorously tested and characterized. In this paper, we present a novel, drone-based, radio-telemetry tracking method for tracking the broad-scale movement paths of animals over multiple days and its implementation and deployment under field conditions. During a 2-week field period in the Cayman Islands, we demonstrated this system's ability to localize multiple targets simultaneously, in daily 10 min tracking sessions over a period of 2 weeks, generating more precise estimates than comparable efforts using manual triangulation techniques.},
+  abstract = {Abstract Radio telemetry is a commonly used technique in conservation biology and ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae and either direction-finding and homing techniques or radio-triangulation techniques. Over the past couple of decades, efforts have been made to utilize unmanned aerial vehicles to make radio-telemetry tracking more efficient, or cover more area. However, many of these approaches are complex and have not been rigorously field-tested. To provide scientists with reliable quality tracking data, tracking systems need to be rigorously tested and characterized. In this paper, we present a novel, drone-based, radio-telemetry tracking method for tracking the broad-scale movement paths of animals over multiple days and its implementation and deployment under field conditions. During a 2-week field period in the Cayman Islands, we demonstrated this system's ability to localize multiple targets simultaneously, in daily 10 min tracking sessions over a period of 2 weeks, generating more precise estimates than comparable efforts using manual triangulation techniques.},
   doi = {https://doi.org/10.1002/rob.22017},
   keywords = {aerial robotics, environmental monitoring, exploration, rotorcraft},
   url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22017},
diff --git a/_bibliography/onboarding_papers/smartfin.bib b/_bibliography/onboarding_papers/smartfin.bib
index c044729e..de490739 100644
--- a/_bibliography/onboarding_papers/smartfin.bib
+++ b/_bibliography/onboarding_papers/smartfin.bib
@@ -9,7 +9,7 @@ @article{bresnehan_cyronak_brewin_et_al_csr_2022
   url = {https://www.sciencedirect.com/science/article/pii/S0278434322001029},
   author = {Philip Bresnahan and Tyler Cyronak and Robert J.W. Brewin and Andreas Andersson and Taylor Wirth and Todd Martz and Travis Courtney and Nathan Hui and Ryan Kastner and Andrew Stern and Todd McGrain and Danica Reinicke and Jon Richard and Katherine Hammond and Shannon Waters},
   keywords = {Coastal oceanography, Citizen science, Surfing, Sea surface temperature, Outreach},
-  abstract = {Coastal populations and hazards are escalating simultaneously, leading to an increased importance of coastal ocean observations. Many well-established observational techniques are expensive, require complex technical training, and offer little to no public engagement. Smartfin, an oceanographic sensor–equipped surfboard fin and citizen science program, was designed to alleviate these issues. Smartfins are typically used by surfers and paddlers in surf zone and nearshore regions where they can help fill gaps between other observational assets. Smartfin user groups can provide data-rich time-series in confined regions. Smartfin comprises temperature, motion, and wet/dry sensing, GPS location, and cellular data transmission capabilities for the near-real-time monitoring of coastal physics and environmental parameters. Smartfin's temperature sensor has an accuracy of 0.05 °C relative to a calibrated Sea-Bird temperature sensor. Data products for quantifying ocean physics from the motion sensor and additional sensors for water quality monitoring are in development. Over 300 Smartfins have been distributed around the world and have been in use for up to five years. The technology has been proven to be a useful scientific research tool in the coastal ocean—especially for observing spatiotemporal variability, validating remotely sensed data, and characterizing surface water depth profiles when combined with other tools—and the project has yielded promising results in terms of formal and informal education and community engagement in coastal health issues with broad international reach. In this article, we describe the technology, the citizen science project design, and the results in terms of natural and social science analyses. We also discuss progress toward our outreach, education, and scientific goals.}
+  abstract = {Coastal populations and hazards are escalating simultaneously, leading to an increased importance of coastal ocean observations. Many well-established observational techniques are expensive, require complex technical training, and offer little to no public engagement. Smartfin, an oceanographic sensor-equipped surfboard fin and citizen science program, was designed to alleviate these issues. Smartfins are typically used by surfers and paddlers in surf zone and nearshore regions where they can help fill gaps between other observational assets. Smartfin user groups can provide data-rich time-series in confined regions. Smartfin comprises temperature, motion, and wet/dry sensing, GPS location, and cellular data transmission capabilities for the near-real-time monitoring of coastal physics and environmental parameters. Smartfin's temperature sensor has an accuracy of 0.05 °C relative to a calibrated Sea-Bird temperature sensor. Data products for quantifying ocean physics from the motion sensor and additional sensors for water quality monitoring are in development. Over 300 Smartfins have been distributed around the world and have been in use for up to five years. The technology has been proven to be a useful scientific research tool in the coastal ocean—especially for observing spatiotemporal variability, validating remotely sensed data, and characterizing surface water depth profiles when combined with other tools—and the project has yielded promising results in terms of formal and informal education and community engagement in coastal health issues with broad international reach. In this article, we describe the technology, the citizen science project design, and the results in terms of natural and social science analyses. We also discuss progress toward our outreach, education, and scientific goals.}
 }

 @Misc{current_efforts,
diff --git a/_bibliography/publications.bib b/_bibliography/publications.bib
index 47f21d60..92d3f83a 100644
--- a/_bibliography/publications.bib
+++ b/_bibliography/publications.bib
@@ -7,7 +7,7 @@ @Article{WallaceGurungKastner_JCGI_2024
   month = feb,
   number = {1},
   volume = {1},
-  abstract = {As weather patterns change worldwide, isolated communities impacted by climate change go unnoticed and we need community-driven solutions. In Himalayan Mustang, Nepal, indigenous Lubra Village faces threats of increasing flash flooding. After every flood, residual muddy sediment hardens across the riverbed like concrete, causing the riverbed elevation to rise. As elevation increases, sediment encroaches on Lubra’s agricultural fields and homes, magnifying flood vulnerability. In the last monsoon season alone, the Lubra community witnessed floods swallowing several agricultural fields and damaging two homes. One solution considers relocating the village to a new location entirely. However, relocation poses a challenging task, as eight centuries of ancestry, heritage, and nuanced cultural complexities exist in both aspects of communal opinion and civil engineering. To investigate this issue further, we utilize remote sensing technologies such as drones and satellite imagery to create unique, highly detailed 3D visualizations and 2D maps to document climate-related impacts in Lubra Village. We also investigate quantifying riverbed elevation trends with digital elevation models to address how the riverbed elevation changes overtime. In tandem, we conduct oral interviews with members of Lubra to understand how flooding and droughts affect their ways of life, allowing us to contextualize these models. Pairing visualized data with personal accounts, we provide an informative story that depicts Himalayan climate change on a local level for supporting Lubra in informing local policy and requesting relief aid.},
+  abstract = {As weather patterns change worldwide, isolated communities impacted by climate change go unnoticed and we need community-driven solutions. In Himalayan Mustang, Nepal, indigenous Lubra Village faces threats of increasing flash flooding. After every flood, residual muddy sediment hardens across the riverbed like concrete, causing the riverbed elevation to rise. As elevation increases, sediment encroaches on Lubra's agricultural fields and homes, magnifying flood vulnerability. In the last monsoon season alone, the Lubra community witnessed floods swallowing several agricultural fields and damaging two homes. One solution considers relocating the village to a new location entirely. However, relocation poses a challenging task, as eight centuries of ancestry, heritage, and nuanced cultural complexities exist in both aspects of communal opinion and civil engineering. To investigate this issue further, we utilize remote sensing technologies such as drones and satellite imagery to create unique, highly detailed 3D visualizations and 2D maps to document climate-related impacts in Lubra Village. We also investigate quantifying riverbed elevation trends with digital elevation models to address how the riverbed elevation changes overtime. In tandem, we conduct oral interviews with members of Lubra to understand how flooding and droughts affect their ways of life, allowing us to contextualize these models. Pairing visualized data with personal accounts, we provide an informative story that depicts Himalayan climate change on a local level for supporting Lubra in informing local policy and requesting relief aid.},
   doi = {10.62895/2997-0083.1006},
   publisher = {School for International Training},
 }
@@ -26,8 +26,8 @@ @MastersThesis{Crutchfield2023
 ecological purposes. Spot uses image processing techniques to generate a pipeline to track
 moving objects frame-to-frame. It then leverages Bayesian Filtering techniques to use the
 frame-to-frame motion to track individual identity between consecutive frames.
-Each stage of Spot’s pipeline–both image processing and the Bayesian Filtering portions
-of the pipeline–introduces many parameters. To determine which parameters are ideal for a
+Each stage of Spot's pipeline-both image processing and the Bayesian Filtering portions
+of the pipeline-introduces many parameters. To determine which parameters are ideal for a
 particular dataset, a design space exploration tool, dubbed Sherlock, is used to choose the
 optimal parameters. As part of this, we evaluate multiple possible objective functions and
 demonstrate the importance of selecting an appropriate one.
@@ -59,7 +59,7 @@ @Article{bresnehan_cyronak_brewin_etal_csr_2022
   issn = {0278-4343},
   pages = {104748},
   volume = {242},
-  abstract = {Coastal populations and hazards are escalating simultaneously, leading to an increased importance of coastal ocean observations. Many well-established observational techniques are expensive, require complex technical training, and offer little to no public engagement. Smartfin, an oceanographic sensor–equipped surfboard fin and citizen science program, was designed to alleviate these issues. Smartfins are typically used by surfers and paddlers in surf zone and nearshore regions where they can help fill gaps between other observational assets. Smartfin user groups can provide data-rich time-series in confined regions. Smartfin comprises temperature, motion, and wet/dry sensing, GPS location, and cellular data transmission capabilities for the near-real-time monitoring of coastal physics and environmental parameters. Smartfin's temperature sensor has an accuracy of 0.05 °C relative to a calibrated Sea-Bird temperature sensor. Data products for quantifying ocean physics from the motion sensor and additional sensors for water quality monitoring are in development. Over 300 Smartfins have been distributed around the world and have been in use for up to five years. The technology has been proven to be a useful scientific research tool in the coastal ocean—especially for observing spatiotemporal variability, validating remotely sensed data, and characterizing surface water depth profiles when combined with other tools—and the project has yielded promising results in terms of formal and informal education and community engagement in coastal health issues with broad international reach. In this article, we describe the technology, the citizen science project design, and the results in terms of natural and social science analyses. We also discuss progress toward our outreach, education, and scientific goals.},
+  abstract = {Coastal populations and hazards are escalating simultaneously, leading to an increased importance of coastal ocean observations. Many well-established observational techniques are expensive, require complex technical training, and offer little to no public engagement. Smartfin, an oceanographic sensor-equipped surfboard fin and citizen science program, was designed to alleviate these issues. Smartfins are typically used by surfers and paddlers in surf zone and nearshore regions where they can help fill gaps between other observational assets. Smartfin user groups can provide data-rich time-series in confined regions. Smartfin comprises temperature, motion, and wet/dry sensing, GPS location, and cellular data transmission capabilities for the near-real-time monitoring of coastal physics and environmental parameters. Smartfin's temperature sensor has an accuracy of 0.05 °C relative to a calibrated Sea-Bird temperature sensor. Data products for quantifying ocean physics from the motion sensor and additional sensors for water quality monitoring are in development. Over 300 Smartfins have been distributed around the world and have been in use for up to five years. The technology has been proven to be a useful scientific research tool in the coastal ocean—especially for observing spatiotemporal variability, validating remotely sensed data, and characterizing surface water depth profiles when combined with other tools—and the project has yielded promising results in terms of formal and informal education and community engagement in coastal health issues with broad international reach. In this article, we describe the technology, the citizen science project design, and the results in terms of natural and social science analyses. We also discuss progress toward our outreach, education, and scientific goals.},
   doi = {10.1016/j.csr.2022.104748},
   keywords = {Coastal oceanography, Citizen science, Surfing, Sea surface temperature, Outreach},
   url = {https://www.sciencedirect.com/science/article/pii/S0278434322001029},
@@ -90,7 +90,7 @@ @InProceedings{ayers_jandali_hwang_etal_icml_2021
   year = {2021},
   month = jul,
   volume = {38},
-  abstract = {The acoustic signature of a natural soundscape can reveal consequences of climate change on biodiversity. Hardware costs, human labor time, and expertise dedicated to labeling audio are impediments to conducting acoustic surveys across a representative portion of an ecosystem. These barriers are quickly eroding away with the advent of low-cost, easy to use, open source hardware and the expansion of the machine learning field providing pre-trained neural networks to test on retrieved acoustic data. One consistent challenge in passive acoustic monitoring (PAM) is a lack of reliability from neural networks on audio recordings collected in the field that contain crucial biodiversity information that otherwise show promising results from publicly available training and test sets. To demonstrate this challenge, we tested a hybrid recurrent neural network (RNN) and convolutional neural network (CNN) binary classifier trained for bird presence/absence on two Peruvian bird audiosets. The RNN achieved an area under the receiver operating characteristics (AUROC) of 95% on a dataset collected from Xeno-canto and Google’s AudioSet ontology in contrast to 65% across a stratified random sample of field recordings collected from the Madre de Dios region of the Peruvian Amazon. In an attempt to alleviate this discrepancy, we applied various audio data augmentation techniques in the network’s training process which led to an AUROC of 77% across the field recordings},
+  abstract = {The acoustic signature of a natural soundscape can reveal consequences of climate change on biodiversity. Hardware costs, human labor time, and expertise dedicated to labeling audio are impediments to conducting acoustic surveys across a representative portion of an ecosystem. These barriers are quickly eroding away with the advent of low-cost, easy to use, open source hardware and the expansion of the machine learning field providing pre-trained neural networks to test on retrieved acoustic data. One consistent challenge in passive acoustic monitoring (PAM) is a lack of reliability from neural networks on audio recordings collected in the field that contain crucial biodiversity information that otherwise show promising results from publicly available training and test sets. To demonstrate this challenge, we tested a hybrid recurrent neural network (RNN) and convolutional neural network (CNN) binary classifier trained for bird presence/absence on two Peruvian bird audiosets. The RNN achieved an area under the receiver operating characteristics (AUROC) of 95% on a dataset collected from Xeno-canto and Google's AudioSet ontology in contrast to 65% across a stratified random sample of field recordings collected from the Madre de Dios region of the Peruvian Amazon. In an attempt to alleviate this discrepancy, we applied various audio data augmentation techniques in the network's training process which led to an AUROC of 77% across the field recordings},
   url = {https://www.climatechange.ai/papers/icml2021/14/paper.pdf},
 }

@@ -132,7 +132,7 @@ @article{hui_lo_moss_et_al_jfr_2021
   keywords = {aerial robotics, environmental monitoring, exploration, rotorcraft},
   doi = {https://doi.org/10.1002/rob.22017},
   url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22017},
-  abstract = {Abstract Radio telemetry is a commonly used technique in conservation biology and ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae and either direction-finding and homing techniques or radio-triangulation techniques. Over the past couple of decades, efforts have been made to utilize unmanned aerial vehicles to make radio-telemetry tracking more efficient, or cover more area. However, many of these approaches are complex and have not been rigorously field-tested. To provide scientists with reliable quality tracking data, tracking systems need to be rigorously tested and characterized. In this paper, we present a novel, drone-based, radio-telemetry tracking method for tracking the broad-scale movement paths of animals over multiple days and its implementation and deployment under field conditions. During a 2-week field period in the Cayman Islands, we demonstrated this system's ability to localize multiple targets simultaneously, in daily 10 min tracking sessions over a period of 2 weeks, generating more precise estimates than comparable efforts using manual triangulation techniques.}}
+  abstract = {Abstract Radio telemetry is a commonly used technique in conservation biology and ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae and either direction-finding and homing techniques or radio-triangulation techniques. Over the past couple of decades, efforts have been made to utilize unmanned aerial vehicles to make radio-telemetry tracking more efficient, or cover more area. However, many of these approaches are complex and have not been rigorously field-tested. To provide scientists with reliable quality tracking data, tracking systems need to be rigorously tested and characterized. In this paper, we present a novel, drone-based, radio-telemetry tracking method for tracking the broad-scale movement paths of animals over multiple days and its implementation and deployment under field conditions. During a 2-week field period in the Cayman Islands, we demonstrated this system's ability to localize multiple targets simultaneously, in daily 10 min tracking sessions over a period of 2 weeks, generating more precise estimates than comparable efforts using manual triangulation techniques.}}

 @InProceedings{perry_tiwari_balaji_reuns_2021,
   author = {Perry, Sean and Tiwari, Vaibhav and Balaji, Nishant and Joun, Erika and Ayers, Jacob and Tobler, Mathias and Ingram, Ian and Kastner, Ryan and Schurgers, Curt},
@@ -141,7 +141,7 @@ @InProceedings{perry_tiwari_balaji_reuns_2021,
   year = {2021},
   month = oct,
   pages = {633--638},
-  abstract = {Passive acoustic monitoring (PAM) involves deploying audio recorders across a natural environment over a long period of time to collect large quantities of audio data. To parse through this data, researchers have worked with automated annotation techniques stemming from Digital Signal Processing and Machine Learning to identify key species calls and judge a region’s biodiversity. To apply and evaluate those techniques, one must acquire strongly labeled data that marks the exact temporal location of audio events in the data, as opposed to weakly labeled data which only labels the presence of an audio event across a clip.Pyrenote was designed to fit the demand for strong manual labels in PAM data. Based on Audino, an open-source, web-based, and easy-to-deploy audio annotation tool, Pyrenote displays a spectrogram for audio annotation, stores labels in a database, and optimizes the labeling process through simplifying the user interface to produce high-quality annotations in a short time frame. This paper documents Pyrenote’s functionality, how the challenge informed the design of the system, and how it compares to other labeling systems.},
+  abstract = {Passive acoustic monitoring (PAM) involves deploying audio recorders across a natural environment over a long period of time to collect large quantities of audio data. To parse through this data, researchers have worked with automated annotation techniques stemming from Digital Signal Processing and Machine Learning to identify key species calls and judge a region's biodiversity. To apply and evaluate those techniques, one must acquire strongly labeled data that marks the exact temporal location of audio events in the data, as opposed to weakly labeled data which only labels the presence of an audio event across a clip.Pyrenote was designed to fit the demand for strong manual labels in PAM data. Based on Audino, an open-source, web-based, and easy-to-deploy audio annotation tool, Pyrenote displays a spectrogram for audio annotation, stores labels in a database, and optimizes the labeling process through simplifying the user interface to produce high-quality annotations in a short time frame. This paper documents Pyrenote's functionality, how the challenge informed the design of the system, and how it compares to other labeling systems.},
   doi = {10.1109/MASS52906.2021.00091},
   issn = {2155-6814},
 }
@@ -165,7 +165,7 @@ @InProceedings{tueller_maddukuri_paxson_et_al_oceans_2021
   year = {2021},
   month = sep,
   publisher = {IEEE},
-  abstract = {There is a need for reliable underwater fish monitoring systems that can provide oceanographers and researchers with valuable data about life underwater. Most current methods rely heavily on human observation which is both error prone and costly. FishSense provides a solution that accelerates the use of depth cameras underwater, opening the door to 3D underwater imaging that is fast, accurate, cost effective, and energy efficient. FishSense is a sleek handheld underwater imaging device that captures both depth and color images. This data has been used to calculate the length of fish, which can be used to derive biomass and health. The FishSense platform has been tested through two separate deployments. The first deployment imaged a toy fish of known length and volume within a controlled testing pool. The second deployment was conducted within an 70,000 gallon aquarium tank with multiple species of fish. A Receiver Operating Characteristic (ROC) curve has been computed based on the detector’s performance across all images, and the mean and standard deviation of the length measurements of the detections has been computed.},
+  abstract = {There is a need for reliable underwater fish monitoring systems that can provide oceanographers and researchers with valuable data about life underwater. Most current methods rely heavily on human observation which is both error prone and costly. FishSense provides a solution that accelerates the use of depth cameras underwater, opening the door to 3D underwater imaging that is fast, accurate, cost effective, and energy efficient. FishSense is a sleek handheld underwater imaging device that captures both depth and color images. This data has been used to calculate the length of fish, which can be used to derive biomass and health. The FishSense platform has been tested through two separate deployments. The first deployment imaged a toy fish of known length and volume within a controlled testing pool. The second deployment was conducted within an 70,000 gallon aquarium tank with multiple species of fish. A Receiver Operating Characteristic (ROC) curve has been computed based on the detector's performance across all images, and the mean and standard deviation of the length measurements of the detections has been computed.},
   url = {https://agu.confex.com/agu/OVS21/meetingapp.cgi/Paper/787405},
 }

@@ -294,7 +294,7 @@ @Article{garrison_richmond_naughton_aap_2016
   number = {2},
   pages = {192--204},
   volume = {4},
-  abstract = {Archaeological tunneling is a standard excavation strategy in Mesoamerica. The ancient Maya built new structures atop older ones that were no longer deemed usable, whether for logistical or ideological reasons. This means that as archaeologists excavate horizontal tunnels into ancient Maya structures, they are essentially moving back in time. As earlier constructions are encountered, these tunnels may deviate in many directions in order to document architectural remains. The resultant excavations often become intricate labyrinths, extending dozens of meters. Traditional forms of archaeological documentation, such as photographs, plan views, and profile drawings, are limited in their ability to convey the complexity of tunnel excavations. Terrestrial Lidar (light detection and ranging) instruments are able to generate precise 3D models of tunnel excavations. This article presents the results of a model created with a Faro™ Focus 3D 120 Scanner of tunneling excavations at the site of El Zotz, Guatemala. The lidar data document the excavations inside a large mortuary pyramid, including intricately decorated architecture from an Early Classic (A.D. 300–600) platform buried within the present form of the structure. Increased collaboration between archaeologists and scholars with technical expertise maximizes the effectiveness of 3D models, as does presenting digital results in tandem with traditional forms of documentation.},
+  abstract = {Archaeological tunneling is a standard excavation strategy in Mesoamerica. The ancient Maya built new structures atop older ones that were no longer deemed usable, whether for logistical or ideological reasons. This means that as archaeologists excavate horizontal tunnels into ancient Maya structures, they are essentially moving back in time. As earlier constructions are encountered, these tunnels may deviate in many directions in order to document architectural remains. The resultant excavations often become intricate labyrinths, extending dozens of meters. Traditional forms of archaeological documentation, such as photographs, plan views, and profile drawings, are limited in their ability to convey the complexity of tunnel excavations. Terrestrial Lidar (light detection and ranging) instruments are able to generate precise 3D models of tunnel excavations. This article presents the results of a model created with a Faro™ Focus 3D 120 Scanner of tunneling excavations at the site of El Zotz, Guatemala. The lidar data document the excavations inside a large mortuary pyramid, including intricately decorated architecture from an Early Classic (A.D. 300-600) platform buried within the present form of the structure. Increased collaboration between archaeologists and scholars with technical expertise maximizes the effectiveness of 3D models, as does presenting digital results in tandem with traditional forms of documentation.},
   doi = {10.7183/2326-3768.4.2.192},
   publisher = {Cambridge University Press},
   url = {https://www.cambridge.org/core/journals/advances-in-archaeological-practice/article/tunnel-vision/9E76CC032829E477BCFB28D1032DFE77},
diff --git a/_deploy_e4e.sh b/_deploy_e4e.sh
new file mode 100644
index 00000000..dd545b70
--- /dev/null
+++ b/_deploy_e4e.sh
@@ -0,0 +1,17 @@
+set -e -x
+PATH="$PATH:/home/deploy/.nvm/versions/node/v22.3.0/bin:/home/deploy/.rbenv/shims"
+echo $PATH
+HOME_DIR=`pwd`
+CLONE_DIR="/home/deploy/workspace/e4e_tools/website2.0"
+JEKYLL_ENV="production"
+# git clone --depth 1 --single-branch --branch=$BRANCH https://github.com/UCSD-E4E/website2.0.git $CLONE_DIR
+cd $CLONE_DIR
+git fetch --all
+git reset --hard origin/main
+mkdir -p $CLONE_DIR/cache
+mkdir -p $CLONE_DIR/cache/resize
+npm ci
+npx gulp build -j '--config _config.yml'
+date >> _site/last_deployed.txt
+rsync -r -e "ssh -i $HOME/.ssh/id_e4edev" --progress $CLONE_DIR/_site/ e4e@e4e.ucsd.edu:htdocs/
+cd $HOME_DIR
diff --git a/_layouts/project_gallery.html b/_layouts/project_gallery.html
index e9d9b719..0d154ab3 100644
--- a/_layouts/project_gallery.html
+++ b/_layouts/project_gallery.html
@@ -16,7 +16,7 @@