-
Notifications
You must be signed in to change notification settings - Fork 0
/
publications.bib
411 lines (373 loc) · 51 KB
/
publications.bib
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
@InProceedings{ayers_perry_prestrelski_etal_neurips_2024,
  author    = {Ayers, Jacob and Perry, Sean and Prestrelski, Samantha and Zhang, Tianqi and von Schoenfeldt, Ludwig and Blue, Mugen and Steinberg, Gabriel and Tobler, Mathias and Ingram, Ian and Schurgers, Curt and Kastner, Ryan},
  title     = {A Deep Learning Approach to the Automated Segmentation of Bird Vocalizations from Weakly Labeled Crowd-sourced Audio},
  booktitle = {NeurIPS 2024 Workshop on Tackling Climate Change with Machine Learning},
  year      = {2024},
  month     = dec,
  abstract  = {Ecologists interested in monitoring the effects caused by climate change are increasingly turning to passive acoustic monitoring, the practice of placing autonomous audio recording units in ecosystems to monitor species richness and occupancy via species calls. However, identifying species calls in large datasets by hand is an expensive task, leading to a reliance on machine learning models. Due to a lack of annotated datasets of soundscape recordings, these models are often trained on large databases of community created focal recordings. A challenge of training on such data is that clips are given a ``weak label,'' a single label that represents the whole clip. This includes segments that only have background noise but are labeled as calls in the training data, reducing model performance. Heuristic methods exist to convert clip-level labels to ``strong'' call-specific labels, where the label tightly bounds the temporal length of the call and better identifies bird vocalizations. Our work improves on the current weakly to strongly labeled method used on the training data for BirdNET, the current most popular model for audio species classification. We utilize an existing RNN-CNN hybrid, resulting in a precision improvement of 12\% (going to 90\% precision) against our new strongly hand-labeled dataset of Peruvian bird species.},
  url       = {https://www.climatechange.ai/papers/neurips2024/8},
}
@MastersThesis{Ayers2024,
  author   = {Ayers, Jacob Glenn},
  title    = {An Exploration of Automated Methods for the Efficient Acquisition of Training Data for Acoustic Species Identification},
  school   = {University of California San Diego},
  year     = {2024},
  month    = jun,
  abstract = {Passive acoustic monitoring is a field that strives to understand the health of ecosystems around the world through the acoustics of natural soundscapes. By identifying fauna vocalizations within soundscapes, we begin to build a quantitative understanding of local biodiversity populations, a key indicator of ecosystem health. The reduced cost of audio recorders has enabled researchers to collect datasets at a scale untenable in years past. These datasets are too vast for exhaustive human identification of species vocalizations. To which, researchers hope to train deep learning models for automated acoustic species identification to mitigate the burden of human labor.},
  url      = {https://escholarship.org/content/qt3xk2377r/qt3xk2377r.pdf},
}
@Article{WallaceGurungKastner_JCGI_2024,
  author    = {Wallace, Ronan and Gurung, Yungdrung Tsewang and Kastner, Ryan},
  title     = {Relocating Lubra Village and Visualizing Himalayan Flood Damages with Remote Sensing},
  journal   = {Journal of Critical Global Issues},
  year      = {2024},
  issn      = {2997-0083},
  month     = feb,
  number    = {1},
  volume    = {1},
  abstract  = {As weather patterns change worldwide, isolated communities impacted by climate change go unnoticed and we need community-driven solutions. In Himalayan Mustang, Nepal, indigenous Lubra Village faces threats of increasing flash flooding. After every flood, residual muddy sediment hardens across the riverbed like concrete, causing the riverbed elevation to rise. As elevation increases, sediment encroaches on Lubra’s agricultural fields and homes, magnifying flood vulnerability. In the last monsoon season alone, the Lubra community witnessed floods swallowing several agricultural fields and damaging two homes. One solution considers relocating the village to a new location entirely. However, relocation poses a challenging task, as eight centuries of ancestry, heritage, and nuanced cultural complexities exist in both aspects of communal opinion and civil engineering. To investigate this issue further, we utilize remote sensing technologies such as drones and satellite imagery to create unique, highly detailed 3D visualizations and 2D maps to document climate-related impacts in Lubra Village. We also investigate quantifying riverbed elevation trends with digital elevation models to address how the riverbed elevation changes over time. In tandem, we conduct oral interviews with members of Lubra to understand how flooding and droughts affect their ways of life, allowing us to contextualize these models. Pairing visualized data with personal accounts, we provide an informative story that depicts Himalayan climate change on a local level for supporting Lubra in informing local policy and requesting relief aid.},
  doi       = {10.62895/2997-0083.1006},
  publisher = {School for International Training},
}
@MastersThesis{Crutchfield2023,
  author   = {Christopher L. Crutchfield},
  title    = {{Spot}, an Algorithm for Low-Resolution, Low-Contrast, Moving Object-Tracking with a Non-Stationary Camera},
  school   = {University of California San Diego},
  year     = {2023},
  month    = jun,
  abstract = {The ability to track moving objects in a video stream is helpful for many applications, from pedestrian and vehicle tracking in a city to animal tracking for ecology and conservation. This write-up introduces Spot, an algorithm for moving object tracking in low-resolution, low-contrast videos. This write-up will discuss two motivating examples to guide the development of Spot---satellite-based surveillance of vehicles in cityscapes and animal tracking using drones for ecological purposes. Spot uses image processing techniques to generate a pipeline to track moving objects frame-to-frame. It then leverages Bayesian Filtering techniques to use the frame-to-frame motion to track individual identity between consecutive frames. Each stage of Spot’s pipeline---both image processing and the Bayesian Filtering portions of the pipeline---introduces many parameters. To determine which parameters are ideal for a particular dataset, a design space exploration tool, dubbed Sherlock, is used to choose the optimal parameters. As part of this, we evaluate multiple possible objective functions and demonstrate the importance of selecting an appropriate one. Spot is competitive with other modern, moving object-tracking algorithms on cityscape data, outperforming others in some of the metrics presented. For tracking animals from drone footage, Spot demonstrated an ability to track wildlife at a similar rate to its performance in some cityscape videos.},
  file     = {:Crutchfield2023 - Spot, an Algorithm for Low Resolution, Low Contrast, Moving Object Tracking with a Non Stationary Camera.pdf:PDF},
  url      = {https://escholarship.org/uc/item/14j7c3qc},
}
@MastersThesis{Hicks_2023,
  author    = {Hicks, Stanley Dillon},
  title     = {Remote Sensing of Mangroves using Machine Learning based on Satellite and Aerial Imagery},
  school    = {University of California San Diego},
  year      = {2023},
  address   = {La Jolla, California},
  abstract  = {Mangrove forests are critical to mitigating climate change and provide many essential benefits to their ecosystems and local environments but are under threat due to deforestation. However, monitoring mangroves through remote sensing can help pinpoint and alleviate the causes of their deforestation. Machine learning can be used with remotely sensed low-resolution satellite or high-resolution aerial imagery to automatically create mangrove extent maps with higher accuracy and frequency than previously possible. This study explores and offers recommendations for two practical scenarios. In the first practical scenario, where only low-resolution hyperspectral satellite imagery is acquired, we implemented several classical machine learning models and applied these results to data acquired in the Clarendon parish of Jamaica. We found that utilizing extensive feature engineering and hyperspectral bands can result in strong performance for mangrove extent classification, with an accuracy of 93\% for our extremely randomized trees model. In the second practical scenario, we explored when there is full coverage of both low-resolution satellite and high-resolution aerial imagery over a survey area. We created a hybrid model which fuses low-resolution pixels and high-resolution imagery, achieving an accuracy of 97\% when applied to a dataset based in Baja California Sur, Mexico, offering another high-performance method to automatically create mangrove extent maps if both high- and low-resolution imagery is available. Overall, the methods tested over these two scenarios provide stakeholders flexibility in data and methods used to achieve accurate, automatic mangrove extent measurement, enabling more frequent mangrove monitoring and further enabling the protection of these important ecosystems.},
  language  = {eng},
  publisher = {University of California, San Diego},
  url       = {https://escholarship.org/uc/item/4pf2f7tr},
}
@Article{bresnehan_cyronak_brewin_etal_csr_2022,
  author   = {Bresnahan, Philip and Cyronak, Tyler and Brewin, Robert J. W. and Andersson, Andreas and Wirth, Taylor and Martz, Todd and Courtney, Travis and Hui, Nathan and Kastner, Ryan and Stern, Andrew and McGrain, Todd and Reinicke, Danica and Richard, Jon and Hammond, Katherine and Waters, Shannon},
  title    = {A high-tech, low-cost, {Internet of Things} surfboard fin for coastal citizen science, outreach, and education},
  journal  = {Continental Shelf Research},
  year     = {2022},
  issn     = {0278-4343},
  pages    = {104748},
  volume   = {242},
  abstract = {Coastal populations and hazards are escalating simultaneously, leading to an increased importance of coastal ocean observations. Many well-established observational techniques are expensive, require complex technical training, and offer little to no public engagement. Smartfin, an oceanographic sensor–equipped surfboard fin and citizen science program, was designed to alleviate these issues. Smartfins are typically used by surfers and paddlers in surf zone and nearshore regions where they can help fill gaps between other observational assets. Smartfin user groups can provide data-rich time-series in confined regions. Smartfin comprises temperature, motion, and wet/dry sensing, GPS location, and cellular data transmission capabilities for the near-real-time monitoring of coastal physics and environmental parameters. Smartfin's temperature sensor has an accuracy of 0.05 °C relative to a calibrated Sea-Bird temperature sensor. Data products for quantifying ocean physics from the motion sensor and additional sensors for water quality monitoring are in development. Over 300 Smartfins have been distributed around the world and have been in use for up to five years. The technology has been proven to be a useful scientific research tool in the coastal ocean—especially for observing spatiotemporal variability, validating remotely sensed data, and characterizing surface water depth profiles when combined with other tools—and the project has yielded promising results in terms of formal and informal education and community engagement in coastal health issues with broad international reach. In this article, we describe the technology, the citizen science project design, and the results in terms of natural and social science analyses. We also discuss progress toward our outreach, education, and scientific goals.},
  doi      = {10.1016/j.csr.2022.104748},
  keywords = {Coastal oceanography, Citizen science, Surfing, Sea surface temperature, Outreach},
  url      = {https://www.sciencedirect.com/science/article/pii/S0278434322001029},
}
@InProceedings{wong_humphrey_switzer_etal_icuns_2022,
  author    = {Wong, Emily and Humphrey, Isabella and Switzer, Scott and Crutchfield, Christopher and Hui, Nathan and Schurgers, Curt and Kastner, Ryan},
  title     = {Underwater Depth Calibration Using a Commercial Depth Camera},
  booktitle = {Proceedings of the 16th International Conference on Underwater Networks \& Systems},
  series    = {WUWNet '22},
  year      = {2022},
  articleno = {22},
  numpages  = {5},
  location  = {Boston, MA, USA},
  address   = {New York, NY, USA},
  publisher = {Association for Computing Machinery},
  isbn      = {9781450399524},
  keywords  = {depth camera calibration, underwater stereo vision},
  abstract  = {Depth cameras are increasingly used in research and industry in underwater settings. However, cameras that have been calibrated in air are notably inaccurate in depth measurements when placed underwater, and little research has been done to explore pre-existing depth calibration methodologies and their effectiveness in underwater environments. We used four methods of calibration on a low-cost, commercial depth camera both in and out of water. For each of these methods, we compared the predicted distance and length of objects from the camera with manually measured values to get an indication of depth and length accuracy. Our findings indicate that the standard methods of calibration in air are largely ineffective for underwater calibration and that custom calibration techniques are necessary to achieve higher accuracy.},
  doi       = {10.1145/3567600.3568158},
}
@InProceedings{ayers_jandali_hwang_etal_icml_2021,
  author    = {Ayers, Jacob and Jandali, Yaman and Hwang, Yoo Jin and Steinberg, Gabriel and Joun, Erika and Tobler, Mathias and Ingram, Ian and Kastner, Ryan and Schurgers, Curt},
  title     = {Challenges in Applying Audio Classification Models to Datasets Containing Crucial Biodiversity Information},
  booktitle = {38th International Conference on Machine Learning},
  year      = {2021},
  month     = jul,
  volume    = {38},
  abstract  = {The acoustic signature of a natural soundscape can reveal consequences of climate change on biodiversity. Hardware costs, human labor time, and expertise dedicated to labeling audio are impediments to conducting acoustic surveys across a representative portion of an ecosystem. These barriers are quickly eroding away with the advent of low-cost, easy to use, open source hardware and the expansion of the machine learning field providing pre-trained neural networks to test on retrieved acoustic data. One consistent challenge in passive acoustic monitoring (PAM) is a lack of reliability from neural networks on audio recordings collected in the field that contain crucial biodiversity information that otherwise show promising results from publicly available training and test sets. To demonstrate this challenge, we tested a hybrid recurrent neural network (RNN) and convolutional neural network (CNN) binary classifier trained for bird presence/absence on two Peruvian bird audiosets. The RNN achieved an area under the receiver operating characteristics (AUROC) of 95\% on a dataset collected from Xeno-canto and Google’s AudioSet ontology in contrast to 65\% across a stratified random sample of field recordings collected from the Madre de Dios region of the Peruvian Amazon. In an attempt to alleviate this discrepancy, we applied various audio data augmentation techniques in the network’s training process which led to an AUROC of 77\% across the field recordings.},
  url       = {https://www.climatechange.ai/papers/icml2021/14/paper.pdf},
}
@InProceedings{ayers_perry_tiwari_et_al_neurips_2021,
  author    = {Ayers, Jacob G and Perry, Sean and Tiwari, Vaibhav and Blue, Mugen and Balaji, Nishant and Schurgers, Curt and Kastner, Ryan and Tobler, Mathias and Ingram, Ian},
  title     = {Reducing the Barriers of Acquiring Ground-truth from Biodiversity Rich Audio Datasets Using Intelligent Sampling Techniques},
  booktitle = {NeurIPS 2021 Workshop on Tackling Climate Change with Machine Learning},
  year      = {2021},
  url       = {https://www.climatechange.ai/papers/neurips2021/56},
}
@InProceedings{crutchfield_sutton_ngo_mb12_2021,
  author    = {Christopher L. Crutchfield and Jake Sutton and Anh Ngo and Emmanuel Zadorian and Gabrielle Hourany and Dylan Nelson and Alvin Wang and Fiona McHenry-Crutchfield and Deborah Forster and Shirley C. Strum and Ryan Kastner and Curt Schurgers},
  title     = {Baboons on the Move: Enhancing Understanding of Collective Decision Making through Automated Motion Detection from Aerial Drone Footage},
  booktitle = {12th International Conference on Methods and Techniques in Behavioral Research and 6th Seminar on Behavioral Methods},
  volume    = {1},
  pages     = {33--39},
  month     = oct,
  year      = {2021},
  isbn      = {978-90-74821-93-3},
  doi       = {10.6084/m9.figshare.13013717},
  url       = {https://www.researchgate.net/profile/Andrew-Spink/publication/344557101_Volume_1_of_the_Proceedings_of_the_joint_meeting_of_the_12th_International_Conference_on_Measuring_Behavior_and_the_6th_Seminar_on_Behavioral_Methods_to_be_held_in_Krakow_Poland_October_15-18_2021/links/5f802f22458515b7cf71fa14/Volume-1-of-the-Proceedings-of-the-joint-meeting-of-the-12th-International-Conference-on-Measuring-Behavior-and-the-6th-Seminar-on-Behavioral-Methods-to-be-held-in-Krakow-Poland-October-15-18-2021.pdf#page=38},
}
@InCollection{hsu_dorian_qi_ucsd_2021,
  author    = {Astrid J. Hsu and John Dorian and Katherine Qi and Eric Lo and Benigno Guerrero Martinez},
  title     = {Drone Imagery Processing Procedure},
  booktitle = {UC San Diego Conferences},
  publisher = {UC San Diego},
  year      = {2021},
  url       = {https://escholarship.org/uc/item/3ww8g75c},
}
@Article{hui_lo_moss_et_al_jfr_2021,
  author   = {Hui, Nathan T. and Lo, Eric K. and Moss, Jen B. and Gerber, Glenn P. and Welch, Mark E. and Kastner, Ryan and Schurgers, Curt},
  title    = {A more precise way to localize animals using drones},
  journal  = {Journal of Field Robotics},
  year     = {2021},
  keywords = {aerial robotics, environmental monitoring, exploration, rotorcraft},
  doi      = {10.1002/rob.22017},
  url      = {https://onlinelibrary.wiley.com/doi/abs/10.1002/rob.22017},
  abstract = {Radio telemetry is a commonly used technique in conservation biology and ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae and either direction-finding and homing techniques or radio-triangulation techniques. Over the past couple of decades, efforts have been made to utilize unmanned aerial vehicles to make radio-telemetry tracking more efficient, or cover more area. However, many of these approaches are complex and have not been rigorously field-tested. To provide scientists with reliable quality tracking data, tracking systems need to be rigorously tested and characterized. In this paper, we present a novel, drone-based, radio-telemetry tracking method for tracking the broad-scale movement paths of animals over multiple days and its implementation and deployment under field conditions. During a 2-week field period in the Cayman Islands, we demonstrated this system's ability to localize multiple targets simultaneously, in daily 10 min tracking sessions over a period of 2 weeks, generating more precise estimates than comparable efforts using manual triangulation techniques.},
}
@InProceedings{perry_tiwari_balaji_reuns_2021,
  author    = {Perry, Sean and Tiwari, Vaibhav and Balaji, Nishant and Joun, Erika and Ayers, Jacob and Tobler, Mathias and Ingram, Ian and Kastner, Ryan and Schurgers, Curt},
  title     = {{Pyrenote}: a Web-based, Manual Annotation Tool for Passive Acoustic Monitoring},
  booktitle = {2021 IEEE 18th International Conference on Mobile Ad Hoc and Smart Systems (MASS)},
  year      = {2021},
  month     = oct,
  pages     = {633--638},
  abstract  = {Passive acoustic monitoring (PAM) involves deploying audio recorders across a natural environment over a long period of time to collect large quantities of audio data. To parse through this data, researchers have worked with automated annotation techniques stemming from Digital Signal Processing and Machine Learning to identify key species calls and judge a region’s biodiversity. To apply and evaluate those techniques, one must acquire strongly labeled data that marks the exact temporal location of audio events in the data, as opposed to weakly labeled data which only labels the presence of an audio event across a clip. Pyrenote was designed to fit the demand for strong manual labels in PAM data. Based on Audino, an open-source, web-based, and easy-to-deploy audio annotation tool, Pyrenote displays a spectrogram for audio annotation, stores labels in a database, and optimizes the labeling process through simplifying the user interface to produce high-quality annotations in a short time frame. This paper documents Pyrenote’s functionality, how the challenge informed the design of the system, and how it compares to other labeling systems.},
  doi       = {10.1109/MASS52906.2021.00091},
  issn      = {2155-6814},
}
@Misc{qi_ucsd_2021,
  author    = {Qi, Katherine L.},
  title     = {Mangroves from the Sky: Comparing Remote Sensing Methods for Regional Analyses in Baja California Sur},
  year      = {2021},
  address   = {La Jolla, California},
  language  = {eng},
  publisher = {University of California, San Diego},
  abstract  = {Consequences of global warming are causing mangrove migration from tropical habitats towards temperate zones. Forests at limits and transition zones are important to monitor for promoting local management and conservation efforts. The advancement of remote sensing technology in the past decade has allowed more insight into these habitats at large scales, and recent studies using satellite imagery have succeeded in creating baselines for global mangrove extent. However, the high surveying range comes with a cost of reduced resolution, causing gaps in areas with high fragmentation or low canopy height, such as in dwarf mangrove habitats. By using drones, we were able to conduct detailed analyses of canopy height distribution for dwarf mangroves in Baja California Sur. This new model provides a focused approach at analyzing parameters that contribute to the multidimensionality of mangrove forests with primarily remote sensing data. Additionally, improved biomass models were constructed with the drone data and compared against satellite data. Due to its inaccuracies in approximated mangrove extent and canopy height, satellite imagery significantly underestimates above ground biomass and carbon measurements in this region, and potentially dwarf mangroves in general. The pairing of satellite and drone imagery allows for a more robust view of mangrove ecosystems, which is critical in understanding their poleward movement with respect to climate change.},
  url       = {https://escholarship.org/uc/item/8fm8j2fh},
}
@InProceedings{tueller_maddukuri_paxson_et_al_oceans_2021,
  author    = {Peter Tueller and Raghav Maddukuri and Patrick Paxson and Vivaswat Suresh and Arjun Ashok and Madison Bland and Ronan Wallace and Julia Guerrero and Brice Semmens and Ryan Kastner},
  title     = {{FishSense}: Underwater {RGBD} Imaging for Fish Measurement and Classification},
  booktitle = {OCEANS 2021 MTS/IEEE SAN DIEGO},
  year      = {2021},
  month     = sep,
  publisher = {IEEE},
  abstract  = {There is a need for reliable underwater fish monitoring systems that can provide oceanographers and researchers with valuable data about life underwater. Most current methods rely heavily on human observation which is both error prone and costly. FishSense provides a solution that accelerates the use of depth cameras underwater, opening the door to 3D underwater imaging that is fast, accurate, cost effective, and energy efficient. FishSense is a sleek handheld underwater imaging device that captures both depth and color images. This data has been used to calculate the length of fish, which can be used to derive biomass and health. The FishSense platform has been tested through two separate deployments. The first deployment imaged a toy fish of known length and volume within a controlled testing pool. The second deployment was conducted within a 70,000 gallon aquarium tank with multiple species of fish. A Receiver Operating Characteristic (ROC) curve has been computed based on the detector’s performance across all images, and the mean and standard deviation of the length measurements of the detections has been computed.},
  url       = {https://agu.confex.com/agu/OVS21/meetingapp.cgi/Paper/787405},
}
@Article{gautier_garrison_rushton_jchmsd_2020,
  author   = {Gautier, Quentin Kevin and Garrison, Thomas G and Rushton, Ferril and Bouck, Nicholas and Lo, Eric and Tueller, Peter and Schurgers, Curt and Kastner, Ryan},
  title    = {Low-cost {3D} scanning systems for cultural heritage documentation},
  journal  = {Journal of Cultural Heritage Management and Sustainable Development},
  year     = {2020},
  issn     = {2044-1266},
  number   = {4},
  pages    = {437--455},
  volume   = {10},
  abstract = {Digital documentation techniques of tunneling excavations at archaeological sites are becoming more common. These methods, such as photogrammetry and LiDAR (Light Detection and Ranging), are able to create precise three-dimensional models of excavations to complement traditional forms of documentation with millimeter to centimeter accuracy. However, these techniques require either expensive pieces of equipment or a long processing time that can be prohibitive during short field seasons in remote areas. This article aims to determine the effectiveness of various low-cost sensors and real-time algorithms to create digital scans of archaeological excavations.},
  doi      = {10.1108/JCHMSD-03-2020-0032},
  keywords = {archaeology, cultural heritage, documentation, surveying and recording, mapping},
  url      = {https://www.emerald.com/insight/content/doi/10.1108/JCHMSD-03-2020-0032/full/html},
}
@InProceedings{hicks_kastner_schurgers_et_all_neurips_2020,
  author    = {Hicks, Dillon and Kastner, Ryan and Schurgers, Curt and Hsu, Astrid and Aburto, Octavio},
  title     = {Mangrove Ecosystem Detection using Mixed-Resolution Imagery with a Hybrid-Convolutional Neural Network},
  booktitle = {Thirty-fourth Conference on Neural Information Processing Systems Workshop: Tackling Climate Change with Machine Learning},
  year      = {2020},
  abstract  = {Mangrove forests are rich in biodiversity and are a large contributor to carbon sequestration critical in the fight against climate change. However, they are currently under threat from anthropogenic activities, so monitoring their health, extent, and productivity is vital to our ability to protect these important ecosystems. Traditionally, lower resolution satellite imagery or high resolution unmanned air vehicle (UAV) imagery has been used independently to monitor mangrove extent, both offering helpful features to predict mangrove extent. To take advantage of both of these data sources, we propose the use of a hybrid neural network, which combines a Convolutional Neural Network (CNN) feature extractor with a Multilayer-Perceptron (MLP), to accurately detect mangrove areas using both medium resolution satellite and high resolution drone imagery. We present a comparison of our novel Hybrid CNN with algorithms previously applied to mangrove image classification on a data set we collected of dwarf mangroves from consumer UAVs in Baja California Sur, Mexico, and show a 95\% intersection over union (IOU) score for mangrove image classification, outperforming all our baselines.},
  url       = {https://www.climatechange.ai/papers/neurips2020/23/paper.pdf},
}
@InProceedings{beluso_xu_patamasing_et_al_massw_2019,
  author    = {Beluso, Charmaine and Xu, Anfeng and Patamasing, Eamon and Sebastian, Brian and Lo, Eric and Schurgers, Curt and Kastner, Ryan and Chen, Liren and Yu, Xuanyi and Sturm, Dan and Barlow, Robert},
  title     = {{D-SEA}: The Underwater Depth Sensing Device for Standalone Time-Averaged Measurements},
  booktitle = {2019 IEEE 16th International Conference on Mobile Ad Hoc and Sensor Systems Workshops (MASSW)},
  year      = {2019},
  pages     = {101--105},
  abstract  = {Access to accurate depth information is important for a wide variety of oceanographic science applications. For example, it is crucial in the creation of 3D models. Currently, divers are manually measuring the depth by using dive watches, but this method is inconsistent because of variable depth readings caused by changing wave heights and human errors. To combat these problems, we created the Depth-Sensor Enclosed Application (D-SEA) to automatically collect and average pressure data while displaying the calculated depth readings underwater. To use D-SEA, the user places it on top of the area of study to measure and gather the underwater depth readings over time. We are working on an affordable, waterproof prototype with a display that is readable underwater, an automatic transition between on and off states when submerged in seawater, and automatic data logging onto an SD card. From testing the recent prototype, results show that D-SEA lasted for weeks in the sleep state and days in the wake state while under depths of 4.40 meters.},
  doi       = {10.1109/MASSW.2019.00027},
  url       = {https://ieeexplore.ieee.org/document/9059518},
}
@InCollection{hsu_lo_dorian_et_al_ucsd_2019,
  author    = {Astrid J. Hsu and Eric Lo and John Dorian and Katherine Qi and Matthew T. Costa and Benigno Guerrero Martinez},
  title     = {Lessons on monitoring mangroves},
  booktitle = {UC San Diego: Aburto Lab},
  publisher = {UC San Diego},
  year      = {2019},
  url       = {https://escholarship.org/uc/item/3bg3206z},
}
@MastersThesis{hui_ucsd_2019,
  author   = {Hui, Nathan},
  title    = {Efficient Drone-based Radio Tracking of Wildlife},
  school   = {University of California, San Diego},
  year     = {2019},
  address  = {La Jolla, Calif},
  keywords = {Drone, Radio Tracking, SUAS, Wildlife Telemetry},
  language = {eng},
  abstract = {Radio telemetry is a critical technique in conservation ecology, particularly for studying the movement and range of individuals and populations. Traditionally, most radio telemetry work is done using handheld directional antennae by using either direction-finding and homing techniques, or radio-triangulation techniques. Over the past couple decades, efforts have been made to utilize aerial vehicles to make radio telemetry tracking more efficient, or cover more area. However, many these approaches require the use of manned aircraft and specialist skill sets. The proliferation of small unmanned aerial systems (SUAS) with high reliability and ease of use, as well as recent development and application of robotic sensing and estimation, opens up the possibility of leveraging SUAS to conduct radio telemetry studies. In this thesis, I present the results of five years of development as well as the testing and deployment of a drone-based radio-telemetry tracking system that is able to track multiple targets simultaneously while operating in field conditions as part of a field expedition.},
  url      = {https://escholarship.org/uc/item/4574s85j},
}
@InProceedings{cain_merchant_avendano_bigdata_2018,
  author    = {Cain, Brennan and Merchant, Zain and Avendano, Indira and Richmond, Dustin and Kastner, Ryan},
  booktitle = {2018 IEEE International Conference on Big Data (Big Data)},
  title     = {{PynqCopter} - An Open-source {FPGA} Overlay for {UAVs}},
  year      = {2018},
  pages     = {2491--2498},
  abstract  = {FPGAs are a computing platform that excel in performing signal processing, control, networking, and security in a high performance and power efficient manner. This makes FPGAs attractive for unmanned aerial vehicles (UAVs) especially as they require smaller payloads and are processing multiple high data rate input sources (e.g. cameras, lidar, radar, gyroscopes, accelerometers). Unfortunately, FPGAs are notoriously difficult to program and they require significant hardware design expertise. However, there are newly released design tools aimed at making FPGAs easier to use, which drove the initial hypothesis for this paper: could three undergraduates program an FPGA to control a UAV in 10 weeks? The result of the experiment is PynqCopter - an open source control system implemented on an FPGA. We created and tested a UAV overlay which is able to run multiple computations in parallel, allowing for the ability to process high amounts of data at runtime.},
  doi       = {10.1109/BigData.2018.8622102},
  url       = {https://ieeexplore.ieee.org/document/8622102},
}
@inproceedings{epperson_rotenberg_bryn_et_al_esa_2018,
  title        = {Seeing the forest from the trees: using drone imagery and deep learning to characterize rainforest in {Southern Belize}},
  author       = {Epperson, Matthew P and Rotenberg, James A and Bryn, Griffin L and Lo, Eric K and Afshari, Sebastian and Kastner, Ryan and Schurgers, Curt and Thomas, Alexis},
  booktitle    = {2018 ESA Annual Meeting (August 5--10)},
  year         = {2018},
  organization = {ESA},
  url          = {https://eco.confex.com/eco/2018/meetingapp.cgi/Paper/74918},
  abstract     = {Tropical rainforests worldwide are negatively impacted from a variety of human-caused threats. Unfortunately, our ability to study these rainforests is impeded by logistical problems such as their physical inaccessibility, expensive aerial imagery, and/or coarse satellite data. One solution is the use of low-cost, Unmanned Aerial Vehicles (UAV), commonly referred to as drones. Drones are now widely recognized as a tool for ecology, environmental science, and conservation, collecting imagery that is superior to satellite data in resolution. We asked: Can we take advantage of the sub-meter, high-resolution imagery to detect specific tree species or groups, and use these data as indicators of rainforest functional traits and characteristics?
We demonstrate a low-cost method for obtaining high-resolution aerial imagery in a rainforest of Belize using a drone over three sites in two rainforest protected areas. We built a workflow that uses Structure from Motion (SfM) on the drone images to create a large orthomosaic and a Deep Convolutional Neural Networks (CNN) to classify indicator tree species. We selected: 1) Cohune Palm (Attalea cohune) as they are indicative of past disturbance and current soil condition; and, 2) the dry-season deciduous tree group since deciduousness is an important ecological factor of rainforest structure and function.},
}
@InProceedings{meyer_devilla_salameh_et_al_aeroconf_2017,
  author    = {Meyer, Dominique E. and De Villa, Miguel and Salameh, Ihab and Fraijo, Elioth and Kastner, Ryan and Schurgers, Curt and Kuester, Falko},
  booktitle = {2017 IEEE Aerospace Conference},
  title     = {Rapid design and manufacturing of task-specific autonomous paragliders using {3D} printing},
  year      = {2017},
  pages     = {1--9},
  abstract  = {This paper explores a paraglider unmanned aerial vehicle (UAV) concept, using rapid design and payload manufacturing techniques to achieve task specific functions. Autonomous fixed wing, multi-rotor and mono-rotor vehicles require prolonged durations of design, manufacturing and tuning to obtain reliable UAVs. Using 3D printing on the meter-scale, we are able to rapidly integrate sensors and alternative payloads into the suspended fuselage of the paraglider. Additive manufacturing has allowed complex designs to be created which provide greater strength and versatility at lower costs compared to the traditional machining method. This manufacturing type has allowed us to produce weekly prototypes for testing. The latest parafoils have yielded higher airspeeds and stable collapse recovery behavior making them interesting for UAV use beyond dirigeable parachutes. The pendulum nature of the platform is self-stabilizing and allows the discrete proportional-integral-derivative (PID) controller to adapt based on mass alteration of the suspended body. We describe modular designs, stabilization algorithms and applications in the imaging of cultural heritage sites for conservation.},
  doi       = {10.1109/AERO.2017.7943914},
  url       = {https://ieeexplore.ieee.org/document/7943914},
}
@Article{tolkova_bauer_wilby_et_al_jasa_2017,
  author   = {Tolkova, Irina and Bauer, Lisa and Wilby, Antonella and Kastner, Ryan and Seger, Kerri},
  journal  = {The Journal of the Acoustical Society of America},
  title    = {Automatic classification of humpback whale social calls},
  year     = {2017},
  number   = {5},
  pages    = {3605--3605},
  volume   = {141},
  abstract = {Acoustic methods are an established technique to monitor marine mammal populations and behavior, but developments in computer science can expand the current capabilities. A central aim of these methods is the automated detection and classification of marine mammal vocalizations. While many studies have applied bioacoustic methods to cetacean calls, there has been limited success with humpback whale (Megaptera novaeangliae) social call classification, which has largely remained a manual task in the bioacoustics community. In this project, we automated this process by analyzing spectrograms of calls using PCA-based and connected-component-based methods, and derived features from relative power in the frequency bins of these spectrograms. These features were used to train and test a supervised Hidden Markov Model (HMM) algorithm to investigate classification feasibility. We varied the number of features used in this analysis by varying the sizes of frequency bins. Generally, we saw an increase in precision, recall, and accuracy for all three classified groups, across the individual data sets, as the number of features decreased. We will present the classification rates of our algorithm across multiple model parameters. Since this method is not specific to humpback whale vocalizations, we hope it will prove useful in other acoustic applications.},
  doi      = {10.1121/1.4987715},
  url      = {https://doi.org/10.1121/1.4987715},
}
@inproceedings{webber_hui_kastner_et_al_ieee_icnc_2017,
  author    = {Webber, Daniel and Hui, Nathan and Kastner, Ryan and Schurgers, Curt},
  title     = {Radio receiver design for Unmanned Aerial wildlife tracking},
  booktitle = {2017 International Conference on Computing, Networking and Communications (ICNC)},
  year      = {2017},
  pages     = {942--946},
  doi       = {10.1109/iccnc.2017.7876260},
  url       = {https://ieeexplore.ieee.org/document/7876260},
  keywords  = {radio receiver design, unmanned aerial wildlife tracking, radio collars, wildlife biologists, behavior patterns, low cost unmanned aerial system, sensitive receiver chain, transponders},
  abstract  = {The use of radio collars is a common method wildlife biologists use to study behavior patterns in animals. Tracking a radio collar from the ground is time consuming and arduous. This task becomes more difficult as the size and output power decreases to accommodate animals as small as an iguana. Our solution is to fly a low cost Unmanned Aerial System equipped with a sensitive receiver chain to locate several transponders at once. The challenge is that the system needs to be low cost and be able to detect the transponder within a range of tens of feet. Initial ground tests indicate that the system was able to detect a collar 70 feet away for under \$100.},
}
@Article{garrison_richmond_naughton_aap_2016,
  author    = {Garrison, Thomas G. and Richmond, Dustin and Naughton, Perry and Lo, Eric and Trinh, Sabrina and Barnes, Zachary and Lin, Albert and Schurgers, Curt and Kastner, Ryan and Newman, Sarah E. and others},
  journal   = {Advances in Archaeological Practice},
  title     = {Tunnel Vision: Documenting Excavations in Three Dimensions with {Lidar} Technology},
  year      = {2016},
  number    = {2},
  pages     = {192--204},
  volume    = {4},
  abstract  = {Archaeological tunneling is a standard excavation strategy in Mesoamerica. The ancient Maya built new structures atop older ones that were no longer deemed usable, whether for logistical or ideological reasons. This means that as archaeologists excavate horizontal tunnels into ancient Maya structures, they are essentially moving back in time. As earlier constructions are encountered, these tunnels may deviate in many directions in order to document architectural remains. The resultant excavations often become intricate labyrinths, extending dozens of meters. Traditional forms of archaeological documentation, such as photographs, plan views, and profile drawings, are limited in their ability to convey the complexity of tunnel excavations. Terrestrial Lidar (light detection and ranging) instruments are able to generate precise 3D models of tunnel excavations. This article presents the results of a model created with a Faro™ Focus 3D 120 Scanner of tunneling excavations at the site of El Zotz, Guatemala. The lidar data document the excavations inside a large mortuary pyramid, including intricately decorated architecture from an Early Classic (A.D. 300–600) platform buried within the present form of the structure. Increased collaboration between archaeologists and scholars with technical expertise maximizes the effectiveness of 3D models, as does presenting digital results in tandem with traditional forms of documentation.},
  doi       = {10.7183/2326-3768.4.2.192},
  publisher = {Cambridge University Press},
  url       = {https://www.cambridge.org/core/journals/advances-in-archaeological-practice/article/tunnel-vision/9E76CC032829E477BCFB28D1032DFE77},
}
@Article{meyer_lo_afshari_et_al_saa_2016,
  author   = {Meyer, Dominique E and Lo, Eric and Afshari, Sebastian and Vaughan, Andrew and Rissolo, Dominique and Kuester, Falko},
  journal  = {The SAA Archaeological Record},
  title    = {Utility of low-cost drones to generate {3D} models of archaeological sites from multisensor data},
  year     = {2016},
  number   = {2},
  pages    = {22--24},
  volume   = {16},
  abstract = {With the emergence of low-cost multicopters on the market, archaeologists have rapidly integrated aerial imaging and photogrammetry with more traditional methods of site documentation. Unmanned Aerial Vehicles (UAVs) serve as simple yet transformative tools that can rapidly map archaeological sites.
The ancient Maya port site of Conil is located along the Laguna Holbox of northern Quintana Roo, Mexico. Established as early as 200 B.C., Conil supported a sizable population well into the Colonial period (Andrews 2020). Initial excavations were conducted by William T. Sanders (1955, 1960). Conil appears to have played a significant role in facilitating coastal trade along the northern coast of the Yucatan Peninsula. The aim of the aerial surveying was to obtain an accurate Digital Elevation Model (DEM) of the site that could be compared to a model that was created using a ground total station (Glover 2006).},
  url      = {http://onlinedigeditions.com/publication/?i=293420&article_id=2421572&view=articleBrowser},
}
@inproceedings{wilby_kastner_hostler_et_al_oceans_2016,
  author    = {Wilby, Antonella and Kastner, Ryan and Hostler, Andrew and Slattery, Ethan},
  title     = {Design of a low-cost and extensible acoustically-triggered camera system for marine population monitoring},
  booktitle = {OCEANS 2016 MTS/IEEE Monterey},
  year      = {2016},
  pages     = {1--9},
  doi       = {10.1109/OCEANS.2016.7761320},
  abstract  = {As the health of the ocean continues to decline, more and more marine populations are at risk of extinction. A significant challenge facing conservation biologists is the ability to effectively monitor at-risk populations due to the challenges of the underwater environment. Obtaining visual data on a marine species typically requires significant time spent by humans observing in the field, which is both costly and time-consuming, and often yields a small amount of data. We present a low-cost, acoustically-triggered camera system to enable remote monitoring and identification of marine populations.},
}
@inproceedings{wilby_slattery_hostler_et_al_wuwnet_2016,
  author    = {Wilby, Antonella and Slattery, Ethan and Hostler, Andrew and Kastner, Ryan},
  title     = {Autonomous Acoustic Trigger for Distributed Underwater Visual Monitoring Systems},
  year      = {2016},
  isbn      = {9781450346375},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  url       = {https://doi.org/10.1145/2999504.3001080},
  doi       = {10.1145/2999504.3001080},
  abstract  = {The ability to obtain reliable, long-term visual data in marine habitats has the potential to transform biological surveys of marine species. However, the underwater environment poses several challenges to visual monitoring: turbidity and light attenuation impede the range of optical sensors, biofouling clouds lenses and underwater housings, and marine species typically range over a large area, far outside of the range of a single camera sensor. Due to these factors, a continuously-recording or time-lapse visual sensor will not be gathering useful data the majority of the time, wasting battery life and filling limited onboard storage with useless images. These limitations make visual monitoring difficult in marine environments, but visual data is invaluable to biologists studying the behaviors and interactions of a species. This paper describes an acoustic-based, autonomous triggering approach to counter the current limitations of underwater visual sensing, and motivates the need for a distributed sensor network for underwater visual monitoring.},
  booktitle = {Proceedings of the 11th ACM International Conference on Underwater Networks \& Systems},
  articleno = {10},
  numpages  = {5},
  keywords  = {autonomous monitoring, underwater cameras, acoustic triggering, biological surveys},
  location  = {Shanghai, China},
  series    = {WUWNet '16},
}
@inproceedings{yeakle_naughton_kastner_et_al_oceans_2016,
  author    = {Yeakle, Riley and Naughton, Perry and Kastner, Ryan and Schurgers, Curt},
  title     = {Inter-node distance estimation from ambient acoustic noise in mobile underwater sensor arrays},
  booktitle = {OCEANS 2016 MTS/IEEE Monterey},
  year      = {2016},
  pages     = {1--8},
  doi       = {10.1109/OCEANS.2016.7761475},
  url       = {https://ieeexplore.ieee.org/document/7761475},
  abstract  = {As the number of units in underwater sensor arrays grow, low-cost localization becomes increasingly important to maintain network scalability. Methods using ambient ocean noise are promising solutions because they do not require external infrastructure, nor expensive on-board sensors. Here we extend past work in stationary array element localization from correlations of ambient noise to a mobile sensor array [1]. After obtaining inter-node distance estimates using ambient noise correlations, these distances can be used to determine a relative localization of an array of mobile underwater sensor platforms without introducing any external infrastructure or on-board localization sensors. In this work we explore the effects of receiver mobility on inter-node distance estimation via correlations of ambient acoustic noise. Through analysis and simulation, we develop an exact solution along with a more tractable approximation to the peak amplitude of the Time-Domain Green's Function between the two mobile receivers, which provides an estimate of their spatial separation. Here we demonstrate that the mobile noise correlation amplitude at the time delay for a sound wave traveling from one receiver to the other can be modeled with the wideband ambiguity function of a single sound source. We then use this approximation to discuss selection of design parameters and their effects on the noise correlation function.},
}
@InProceedings{meyer_hess_lo_et_al_digitalheritage_2015,
  author    = {Meyer, Dominique and Hess, Michael and Lo, Eric and Wittich, Christine E. and Hutchinson, Tara C. and Kuester, Falko},
  booktitle = {2015 Digital Heritage},
  title     = {{UAV}-based post disaster assessment of cultural heritage sites following the 2014 {South Napa Earthquake}},
  year      = {2015},
  pages     = {421--424},
  volume    = {2},
  abstract  = {On Sunday, August 24, 2014, the American Canyon (South Napa) Earthquake occurred at 3:20am local time with moment magnitude MW of 6.1, causing damage with and estimated economic impact of over one billion dollars. Many historic landmarks were severely damaged, some too dangerous to access, while others where simply difficult or impossible to reach quickly by conventional means. This paper explores semi- automatic surveying techniques using unmanned aerial vehicles (UAVs) to support immediate post-earthquake perishable data collection and damage assessment in the context of a case study for the Trefethen Family Vineyard. This case study will examine the methods used to create 3D models through Structure from Motion, which uses photogrammetric data to recreate the geometry of the site being imaged. The ability to recreate accurate models of real world objects rapidly, using images from low cost drones underlines the increasing feasibility of using UAVs for emergency response scenarios.},
  doi       = {10.1109/DigitalHeritage.2015.7419539},
  url       = {https://ieeexplore.ieee.org/document/7419539},
}
@inproceedings{santos_barnes_lo_et_al_ieee_mass_2014,
  author    = {Santos, Gilberto Antonio Marcon Dos and Barnes, Zachary and Lo, Eric and Ritoper, Bryan and Nishizaki, Lauren and Tejeda, Xavier and Ke, Alex and Lin, Han and Schurgers, Curt and Lin, Albert and Kastner, Ryan},
  title     = {Small Unmanned Aerial Vehicle System for Wildlife Radio Collar Tracking},
  booktitle = {2014 IEEE 11th International Conference on Mobile Ad Hoc and Sensor Systems},
  year      = {2014},
  pages     = {761--766},
  doi       = {10.1109/mass.2014.48},
  url       = {https://ieeexplore.ieee.org/document/7035779},
  keywords  = {signal to noise ratio, global positioning system, finite element analysis, wildlife, hardware, receivers, aircraft, autonomous aerial vehicles, directional antennas, radio tracking, signal processing, small unmanned aerial vehicle system, wildlife radio collar tracking, VHF signal, custom signal processing algorithms, small fixed-wing aircraft drone, wildlife telemetry, software-defined radio, digital signal processing},
  abstract  = {This paper describes a low cost system for tracking wildlife that is equipped with radio collars. Currently, researchers have to physically go into the field with a directional antenna to try to pinpoint the VHF (very high frequency) signal originating from a wildlife tracking collar. Depending on the terrain, it could take an entire day to locate a single animal. To vastly improve upon this traditional approach, the system proposed here utilizes a small fixed-wing aircraft drone with a simple radio on-board, flying an automated mission. Received signal strength is recorded, and used to create a heat map that shows the collar's position. A prototype of this system was built using off-the-shelf hardware and custom signal processing algorithms. Initial field tests confirm the systems capabilities and its promise for wildlife tracking.},
}
@Comment{jabref-meta: databaseType:bibtex;}
@Comment{jabref-meta: keypatterndefault:[authors3]_[journal:abbr:regex("[a-z\\&]","")][booktitle:abbr:regex("[a-z\\&]","")]_[year];}
@Comment{jabref-meta: saveActions:enabled;
all-text-fields[identity]
date[normalize_date]
month[normalize_month]
pages[normalize_page_numbers]
;}
@Comment{jabref-meta: saveOrderConfig:specified;year;true;author;false;title;true;}