Changes
On June 6, 2023 at 3:11:25 PM CDT, kelsey-friesenumanitoba-ca:
-
Changed value of field
schemeURI
to https://www.polardata.ca/pdcinput/public/keywordlibrary
in Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning -
Changed value of field
subjectScheme
to Polar Data Catalogue
in Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning
f | 1 | { | f | 1 | { |
2 | "Author": [ | 2 | "Author": [ | ||
3 | { | 3 | { | ||
4 | "affiliation": "Centre for Earth Observation Science - | 4 | "affiliation": "Centre for Earth Observation Science - | ||
5 | University of Manitoba", | 5 | University of Manitoba", | ||
6 | "creatorName": "Harasyn, Madison", | 6 | "creatorName": "Harasyn, Madison", | ||
7 | "email": "Madison.harasyn@umanitoba.ca", | 7 | "email": "Madison.harasyn@umanitoba.ca", | ||
8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | 8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | ||
9 | "nameIdentifierScheme": "ORCID", | 9 | "nameIdentifierScheme": "ORCID", | ||
10 | "nameType": "Personal", | 10 | "nameType": "Personal", | ||
11 | "schemeURI": "http://orcid.org/" | 11 | "schemeURI": "http://orcid.org/" | ||
12 | }, | 12 | }, | ||
13 | { | 13 | { | ||
14 | "affiliation": "Centre for Earth Observation Science - | 14 | "affiliation": "Centre for Earth Observation Science - | ||
15 | University of Manitoba", | 15 | University of Manitoba", | ||
16 | "creatorName": "Chan, Wayne", | 16 | "creatorName": "Chan, Wayne", | ||
17 | "email": "wayne.chan@umanitoba.ca", | 17 | "email": "wayne.chan@umanitoba.ca", | ||
18 | "nameIdentifier": "", | 18 | "nameIdentifier": "", | ||
19 | "nameType": "Personal" | 19 | "nameType": "Personal" | ||
20 | }, | 20 | }, | ||
21 | { | 21 | { | ||
22 | "affiliation": "Centre for Earth Observation Science - | 22 | "affiliation": "Centre for Earth Observation Science - | ||
23 | University of Manitoba", | 23 | University of Manitoba", | ||
24 | "creatorName": "Ausen, Emma", | 24 | "creatorName": "Ausen, Emma", | ||
25 | "email": "emma.ausen@umanitoba.ca", | 25 | "email": "emma.ausen@umanitoba.ca", | ||
26 | "nameIdentifier": "", | 26 | "nameIdentifier": "", | ||
27 | "nameType": "Personal" | 27 | "nameType": "Personal" | ||
28 | }, | 28 | }, | ||
29 | { | 29 | { | ||
30 | "affiliation": "Centre for Earth Observation Science - | 30 | "affiliation": "Centre for Earth Observation Science - | ||
31 | University of Manitoba", | 31 | University of Manitoba", | ||
32 | "creatorName": "Barber, David", | 32 | "creatorName": "Barber, David", | ||
33 | "email": "david.barber@umanitoba.ca", | 33 | "email": "david.barber@umanitoba.ca", | ||
34 | "nameIdentifier": "0000-0001-9466-3291", | 34 | "nameIdentifier": "0000-0001-9466-3291", | ||
35 | "nameIdentifierScheme": "ORCID", | 35 | "nameIdentifierScheme": "ORCID", | ||
36 | "nameType": "Personal", | 36 | "nameType": "Personal", | ||
37 | "schemeURI": "http://orcid.org/" | 37 | "schemeURI": "http://orcid.org/" | ||
38 | } | 38 | } | ||
39 | ], | 39 | ], | ||
40 | "Identifier": "10.1139/juvs-2021-0024", | 40 | "Identifier": "10.1139/juvs-2021-0024", | ||
41 | "PublicationYear": "2022", | 41 | "PublicationYear": "2022", | ||
42 | "Publisher": "Drone Systems and Applications", | 42 | "Publisher": "Drone Systems and Applications", | ||
43 | "ResourceType": "journal article", | 43 | "ResourceType": "journal article", | ||
44 | "Rights": "Creative Commons Attribution 4.0 International", | 44 | "Rights": "Creative Commons Attribution 4.0 International", | ||
45 | "Version": "1.0", | 45 | "Version": "1.0", | ||
46 | "author": null, | 46 | "author": null, | ||
47 | "author_email": null, | 47 | "author_email": null, | ||
48 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | 48 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | ||
49 | David G.Barber. Detection and tracking of belugas, kayaks and | 49 | David G.Barber. Detection and tracking of belugas, kayaks and | ||
50 | motorized boats in drone video using deep learning. Drone Systems and | 50 | motorized boats in drone video using deep learning. Drone Systems and | ||
51 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | 51 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | ||
52 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | 52 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | ||
53 | "descriptionType": "Abstract", | 53 | "descriptionType": "Abstract", | ||
54 | "groups": [ | 54 | "groups": [ | ||
55 | { | 55 | { | ||
n | 56 | "description": "Features and characteristics of salt water | n | 56 | "description": "Inland water features, drainage systems and |
57 | bodies.\r\n\r\nIn CEOS, related research themes include | 57 | their characteristics. Examples of data you can find here include | ||
58 | biogeochemistry, modelling, marine mammals, oil spill response, | 58 | river and lake data, water quality data. \r\n\r\nIn CEOS, related | ||
59 | physical oceanography, remote sensing and technology and trace metals | 59 | research themes include biogeochemistry, Inland lakes and waters, | ||
60 | modelling, remote sensing and technology, trace metals and | ||||
60 | and contaminants", | 61 | contaminants.", | ||
61 | "display_name": "Marine", | 62 | "display_name": "Freshwater", | ||
62 | "id": "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | 63 | "id": "8f8cd877-b037-4b1a-b928-f86d9e093741", | ||
63 | "image_display_url": | 64 | "image_display_url": | ||
n | 64 | ata/uploads/group/2021-10-31-211516.365746ofinspireoceanographic.svg", | n | 65 | /data/uploads/group/2021-10-31-211937.658599hyinspirehydrography.svg", |
65 | "name": "marine", | 66 | "name": "freshwater", | ||
66 | "title": "Marine" | 67 | "title": "Freshwater" | ||
67 | }, | ||||
68 | { | ||||
69 | "description": "Image: \"Earth from Space\" by NASA Goddard | ||||
70 | Photo and Video is licensed under CC BY 2.0", | ||||
71 | "display_name": "Remote Sensing", | ||||
72 | "id": "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9", | ||||
73 | "image_display_url": | ||||
74 | anitoba.ca/data/uploads/group/2022-02-05-222621.346712earthimage.jpg", | ||||
75 | "name": "remote-sensing", | ||||
76 | "title": "Remote Sensing" | ||||
77 | } | 68 | } | ||
78 | ], | 69 | ], | ||
79 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 70 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
80 | "isopen": false, | 71 | "isopen": false, | ||
81 | "language": "English", | 72 | "language": "English", | ||
82 | "licenceType": "Open", | 73 | "licenceType": "Open", | ||
83 | "license_id": null, | 74 | "license_id": null, | ||
84 | "license_title": null, | 75 | "license_title": null, | ||
85 | "maintainer": null, | 76 | "maintainer": null, | ||
86 | "maintainer_email": null, | 77 | "maintainer_email": null, | ||
87 | "metadata_created": "2022-04-07T19:45:13.021227", | 78 | "metadata_created": "2022-04-07T19:45:13.021227", | ||
n | 88 | "metadata_modified": "2023-06-06T20:09:50.526039", | n | 79 | "metadata_modified": "2023-06-06T20:11:25.560811", |
89 | "name": "detect-video-deep-learning", | 80 | "name": "detect-video-deep-learning", | ||
90 | "notes": "Aerial imagery surveys are commonly used in marine mammal | 81 | "notes": "Aerial imagery surveys are commonly used in marine mammal | ||
91 | research to determine population size, distribution and habitat use. | 82 | research to determine population size, distribution and habitat use. | ||
92 | Analysis of aerial photos involves hours of manually identifying | 83 | Analysis of aerial photos involves hours of manually identifying | ||
93 | individuals present in each image and converting raw counts into | 84 | individuals present in each image and converting raw counts into | ||
94 | useable biological statistics. Our research proposes the use of deep | 85 | useable biological statistics. Our research proposes the use of deep | ||
95 | learning algorithms to increase the efficiency of the marine mammal | 86 | learning algorithms to increase the efficiency of the marine mammal | ||
96 | research workflow. To test the feasibility of this proposal, the | 87 | research workflow. To test the feasibility of this proposal, the | ||
97 | existing YOLOv4 convolutional neural network model was trained to | 88 | existing YOLOv4 convolutional neural network model was trained to | ||
98 | detect belugas, kayaks and motorized boats in oblique drone imagery, | 89 | detect belugas, kayaks and motorized boats in oblique drone imagery, | ||
99 | collected from a stationary tethered system. Automated computer-based | 90 | collected from a stationary tethered system. Automated computer-based | ||
100 | object detection achieved the following precision and recall, | 91 | object detection achieved the following precision and recall, | ||
101 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | 92 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | ||
102 | kayak = 96%/96%. We then tested the performance of computer vision | 93 | kayak = 96%/96%. We then tested the performance of computer vision | ||
103 | tracking of belugas and occupied watercraft in drone videos using the | 94 | tracking of belugas and occupied watercraft in drone videos using the | ||
104 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | 95 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | ||
105 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | 96 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | ||
106 | precision (MOTP) between 63% and 86%. Results from this research | 97 | precision (MOTP) between 63% and 86%. Results from this research | ||
107 | indicate that deep learning technology can detect and track features | 98 | indicate that deep learning technology can detect and track features | ||
108 | more consistently than human annotators, allowing for larger datasets | 99 | more consistently than human annotators, allowing for larger datasets | ||
109 | to be processed within a fraction of the time while avoiding | 100 | to be processed within a fraction of the time while avoiding | ||
110 | discrepancies introduced by labeling fatigue or multiple human | 101 | discrepancies introduced by labeling fatigue or multiple human | ||
111 | annotators.\r\n\r\n**R\u00e9sum\u00e9** Les relev\u00e9s par imagerie | 102 | annotators.\r\n\r\n**R\u00e9sum\u00e9** Les relev\u00e9s par imagerie | ||
112 | a\u00e9rienne sont couramment utilis\u00e9s dans la recherche sur les | 103 | a\u00e9rienne sont couramment utilis\u00e9s dans la recherche sur les | ||
113 | mammif\u00e8res marins pour d\u00e9terminer la taille de la | 104 | mammif\u00e8res marins pour d\u00e9terminer la taille de la | ||
114 | population, sa r\u00e9partition et l\u2019utilisation de | 105 | population, sa r\u00e9partition et l\u2019utilisation de | ||
115 | l\u2019habitat. L\u2019analyse des photos a\u00e9riennes implique des | 106 | l\u2019habitat. L\u2019analyse des photos a\u00e9riennes implique des | ||
116 | heures d\u2019identification manuelle des individus pr\u00e9sents dans | 107 | heures d\u2019identification manuelle des individus pr\u00e9sents dans | ||
117 | chaque image et la conversion des chiffres bruts en statistiques | 108 | chaque image et la conversion des chiffres bruts en statistiques | ||
118 | biologiques utilisables. Notre recherche propose l\u2019utilisation | 109 | biologiques utilisables. Notre recherche propose l\u2019utilisation | ||
119 | d\u2019algorithmes d\u2019apprentissage en profondeur pour augmenter | 110 | d\u2019algorithmes d\u2019apprentissage en profondeur pour augmenter | ||
120 | l\u2019efficacit\u00e9 du flux de recherche sur les mammif\u00e8res | 111 | l\u2019efficacit\u00e9 du flux de recherche sur les mammif\u00e8res | ||
121 | marins. Pour mettre \u00e0 l\u2019essai la faisabilit\u00e9 de cette | 112 | marins. Pour mettre \u00e0 l\u2019essai la faisabilit\u00e9 de cette | ||
122 | proposition, le mod\u00e8le de r\u00e9seau de neurones \u00e0 | 113 | proposition, le mod\u00e8le de r\u00e9seau de neurones \u00e0 | ||
123 | convolution YOLOv4 existant a \u00e9t\u00e9 entra\u00een\u00e9 pour | 114 | convolution YOLOv4 existant a \u00e9t\u00e9 entra\u00een\u00e9 pour | ||
124 | d\u00e9tecter les b\u00e9lugas, les kayaks et les embarcations | 115 | d\u00e9tecter les b\u00e9lugas, les kayaks et les embarcations | ||
125 | motoris\u00e9es dans des images de drones obliques, recueillies \u00e0 | 116 | motoris\u00e9es dans des images de drones obliques, recueillies \u00e0 | ||
126 | partir d\u2019un syst\u00e8me fixe reli\u00e9. La d\u00e9tection | 117 | partir d\u2019un syst\u00e8me fixe reli\u00e9. La d\u00e9tection | ||
127 | automatis\u00e9e d\u2019objets par ordinateur a atteint la | 118 | automatis\u00e9e d\u2019objets par ordinateur a atteint la | ||
128 | pr\u00e9cision et le rappel suivants, respectivement, pour chaque | 119 | pr\u00e9cision et le rappel suivants, respectivement, pour chaque | ||
129 | classe : b\u00e9luga : 74 %/72 %; bateau : 97 %/99 %; kayak : 96 %/96 | 120 | classe : b\u00e9luga : 74 %/72 %; bateau : 97 %/99 %; kayak : 96 %/96 | ||
130 | %. Les auteurs ont ensuite test\u00e9 la performance de poursuite au | 121 | %. Les auteurs ont ensuite test\u00e9 la performance de poursuite au | ||
131 | moyen de la vision par ordinateur des b\u00e9lugas et des motomarines | 122 | moyen de la vision par ordinateur des b\u00e9lugas et des motomarines | ||
132 | dans des vid\u00e9os de drones \u00e0 l\u2019aide de l\u2019algorithme | 123 | dans des vid\u00e9os de drones \u00e0 l\u2019aide de l\u2019algorithme | ||
133 | de poursuite DeepSORT, qui a obtenu une exactitude de poursuite des | 124 | de poursuite DeepSORT, qui a obtenu une exactitude de poursuite des | ||
134 | objets multiples (\u00ab MOTA \u00bb) allant de 37 \u00e0 88 % et une | 125 | objets multiples (\u00ab MOTA \u00bb) allant de 37 \u00e0 88 % et une | ||
135 | pr\u00e9cision de poursuite des objets multiples (\u00ab MOTP \u00bb) | 126 | pr\u00e9cision de poursuite des objets multiples (\u00ab MOTP \u00bb) | ||
136 | allant de 63 \u00e0 86 %. Les r\u00e9sultats de cette recherche | 127 | allant de 63 \u00e0 86 %. Les r\u00e9sultats de cette recherche | ||
137 | indiquent que la technologie d\u2019apprentissage profond peut | 128 | indiquent que la technologie d\u2019apprentissage profond peut | ||
138 | d\u00e9tecter et suivre les caract\u00e9ristiques plus | 129 | d\u00e9tecter et suivre les caract\u00e9ristiques plus | ||
139 | r\u00e9guli\u00e8rement que les annotateurs humains, permettant de | 130 | r\u00e9guli\u00e8rement que les annotateurs humains, permettant de | ||
140 | traiter des ensembles de donn\u00e9es plus volumineux en une fraction | 131 | traiter des ensembles de donn\u00e9es plus volumineux en une fraction | ||
141 | de temps tout en \u00e9vitant les \u00e9carts introduits par la | 132 | de temps tout en \u00e9vitant les \u00e9carts introduits par la | ||
142 | fatigue d\u2019\u00e9tiquetage ou de multiples annotateurs humains. | 133 | fatigue d\u2019\u00e9tiquetage ou de multiples annotateurs humains. | ||
143 | [Traduit par la R\u00e9daction]", | 134 | [Traduit par la R\u00e9daction]", | ||
144 | "num_resources": 2, | 135 | "num_resources": 2, | ||
145 | "num_tags": 6, | 136 | "num_tags": 6, | ||
146 | "organization": { | 137 | "organization": { | ||
147 | "approval_status": "approved", | 138 | "approval_status": "approved", | ||
148 | "created": "2017-07-21T13:15:49.935872", | 139 | "created": "2017-07-21T13:15:49.935872", | ||
149 | "description": "The Centre for Earth Observation Science (CEOS) | 140 | "description": "The Centre for Earth Observation Science (CEOS) | ||
150 | was established in 1994 with a mandate to research, preserve and | 141 | was established in 1994 with a mandate to research, preserve and | ||
151 | communicate knowledge of Earth system processes using the technologies | 142 | communicate knowledge of Earth system processes using the technologies | ||
152 | of Earth Observation Science. Research is multidisciplinary and | 143 | of Earth Observation Science. Research is multidisciplinary and | ||
153 | collaborative seeking to understand the complex interrelationships | 144 | collaborative seeking to understand the complex interrelationships | ||
154 | between elements of Earth systems, and how these systems will likely | 145 | between elements of Earth systems, and how these systems will likely | ||
155 | respond to climate change. Although researchers have worked in many | 146 | respond to climate change. Although researchers have worked in many | ||
156 | regions, the Arctic marine system has always been a unifying focus of | 147 | regions, the Arctic marine system has always been a unifying focus of | ||
157 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | 148 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | ||
158 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | 149 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | ||
159 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | 150 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | ||
160 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | 151 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | ||
161 | existing research activity are divided among key themes:\r\n\r\nArctic | 152 | existing research activity are divided among key themes:\r\n\r\nArctic | ||
162 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | 153 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | ||
163 | preservation, archaeo-geophysics, permafrost degredation, lithic | 154 | preservation, archaeo-geophysics, permafrost degredation, lithic | ||
164 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | 155 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | ||
165 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | 156 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | ||
166 | ethnographic analogy, traditional knowledge, climate change and | 157 | ethnographic analogy, traditional knowledge, climate change and | ||
167 | northern heritage resource management.\r\n\r\nAtmospheric | 158 | northern heritage resource management.\r\n\r\nAtmospheric | ||
168 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | 159 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | ||
169 | extreme weather, circulation, eddy correlations, polar vortex, | 160 | extreme weather, circulation, eddy correlations, polar vortex, | ||
170 | climate, teleconnections, geophysical fluid dynamics, flux and energy | 161 | climate, teleconnections, geophysical fluid dynamics, flux and energy | ||
171 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | 162 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | ||
172 | albedo feedback, cloud radiative forcing, pCO2. | 163 | albedo feedback, cloud radiative forcing, pCO2. | ||
173 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | 164 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | ||
174 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | 165 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | ||
175 | metals, minerals, hydrocarbons, brine processes, otolith | 166 | metals, minerals, hydrocarbons, brine processes, otolith | ||
176 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | 167 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | ||
177 | trace metals, PAHs, source, transport, transformation, pathways, | 168 | trace metals, PAHs, source, transport, transformation, pathways, | ||
178 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | 169 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | ||
179 | Observation Science: Active and passive microwave, LiDAR, EM | 170 | Observation Science: Active and passive microwave, LiDAR, EM | ||
180 | induction, spatial-temporal analysis, forward and inverse scattering | 171 | induction, spatial-temporal analysis, forward and inverse scattering | ||
181 | models, complex permittivity, ocean colour, ocean surface roughness, | 172 | models, complex permittivity, ocean colour, ocean surface roughness, | ||
182 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | 173 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | ||
183 | Biophysical processes, primary production; ice algae, ice | 174 | Biophysical processes, primary production; ice algae, ice | ||
184 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | 175 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | ||
185 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | 176 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | ||
186 | transport, nutrient transport, contaminants, landscape processes, | 177 | transport, nutrient transport, contaminants, landscape processes, | ||
187 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | 178 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | ||
188 | whales, habitat, conservation, satellite telemetry, distribution, | 179 | whales, habitat, conservation, satellite telemetry, distribution, | ||
189 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | 180 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | ||
190 | Simulation of sea ice and oceanic regional processes, Nucleus for | 181 | Simulation of sea ice and oceanic regional processes, Nucleus for | ||
191 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | 182 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | ||
192 | interactions, hind cast simulations and projections for sea ice state | 183 | interactions, hind cast simulations and projections for sea ice state | ||
193 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | 184 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | ||
194 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | 185 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | ||
195 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | 186 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | ||
196 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | 187 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | ||
197 | processes, extreme ice features and hazards, snow, ridges, | 188 | processes, extreme ice features and hazards, snow, ridges, | ||
198 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | 189 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | ||
199 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | 190 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | ||
200 | hunting, food security, sea ice use, community-based research, | 191 | hunting, food security, sea ice use, community-based research, | ||
201 | community-based monitoring, two ways of knowing.", | 192 | community-based monitoring, two ways of knowing.", | ||
202 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 193 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
203 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | 194 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | ||
204 | "is_organization": true, | 195 | "is_organization": true, | ||
205 | "name": "ceos", | 196 | "name": "ceos", | ||
206 | "state": "active", | 197 | "state": "active", | ||
207 | "title": "Centre for Earth Observation Science", | 198 | "title": "Centre for Earth Observation Science", | ||
208 | "type": "organization" | 199 | "type": "organization" | ||
209 | }, | 200 | }, | ||
210 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 201 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
211 | "private": false, | 202 | "private": false, | ||
212 | "related_datasets": [ | 203 | "related_datasets": [ | ||
213 | "b5f259b4-3ace-4750-bfb0-47c4e794082f" | 204 | "b5f259b4-3ace-4750-bfb0-47c4e794082f" | ||
214 | ], | 205 | ], | ||
215 | "related_programs": [], | 206 | "related_programs": [], | ||
216 | "relationships_as_object": [], | 207 | "relationships_as_object": [], | ||
217 | "relationships_as_subject": [], | 208 | "relationships_as_subject": [], | ||
218 | "resources": [ | 209 | "resources": [ | ||
219 | { | 210 | { | ||
220 | "cache_last_updated": null, | 211 | "cache_last_updated": null, | ||
221 | "cache_url": null, | 212 | "cache_url": null, | ||
222 | "created": "2022-04-07T19:49:13.974750", | 213 | "created": "2022-04-07T19:49:13.974750", | ||
223 | "datastore_active": false, | 214 | "datastore_active": false, | ||
224 | "datastore_contains_all_records_of_source_file": false, | 215 | "datastore_contains_all_records_of_source_file": false, | ||
225 | "description": "Churchill Beluga Boat Drone Imagery related | 216 | "description": "Churchill Beluga Boat Drone Imagery related | ||
226 | journal article published in Drone Systems and Applications.\r\nDOI: | 217 | journal article published in Drone Systems and Applications.\r\nDOI: | ||
227 | https://doi.org/10.1139/juvs-2021-0024", | 218 | https://doi.org/10.1139/juvs-2021-0024", | ||
228 | "format": "PDF", | 219 | "format": "PDF", | ||
229 | "hash": "", | 220 | "hash": "", | ||
230 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | 221 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | ||
231 | "last_modified": "2022-04-07T20:02:20.594051", | 222 | "last_modified": "2022-04-07T20:02:20.594051", | ||
232 | "metadata_modified": "2023-05-18T21:05:56.975275", | 223 | "metadata_modified": "2023-05-18T21:05:56.975275", | ||
233 | "mimetype": "application/pdf", | 224 | "mimetype": "application/pdf", | ||
234 | "mimetype_inner": null, | 225 | "mimetype_inner": null, | ||
235 | "name": "Detection and tracking of belugas, kayaks and motorized | 226 | "name": "Detection and tracking of belugas, kayaks and motorized | ||
236 | boats in drone video using deep learning", | 227 | boats in drone video using deep learning", | ||
237 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 228 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
238 | "position": 0, | 229 | "position": 0, | ||
239 | "resCategory": "supplemental", | 230 | "resCategory": "supplemental", | ||
240 | "resource_type": null, | 231 | "resource_type": null, | ||
241 | "size": 4104522, | 232 | "size": 4104522, | ||
242 | "state": "active", | 233 | "state": "active", | ||
243 | "url": | 234 | "url": | ||
244 | rce/5bcbb0bc-425b-4fad-b7ff-4c8599043dcf/download/juvs-2021-0024.pdf", | 235 | rce/5bcbb0bc-425b-4fad-b7ff-4c8599043dcf/download/juvs-2021-0024.pdf", | ||
245 | "url_type": "upload" | 236 | "url_type": "upload" | ||
246 | }, | 237 | }, | ||
247 | { | 238 | { | ||
248 | "cache_last_updated": null, | 239 | "cache_last_updated": null, | ||
249 | "cache_url": null, | 240 | "cache_url": null, | ||
250 | "created": "2023-05-18T21:05:57.006643", | 241 | "created": "2023-05-18T21:05:57.006643", | ||
251 | "datastore_active": false, | 242 | "datastore_active": false, | ||
252 | "datastore_contains_all_records_of_source_file": false, | 243 | "datastore_contains_all_records_of_source_file": false, | ||
253 | "description": "Researchers at CEOS are often asked to write a | 244 | "description": "Researchers at CEOS are often asked to write a | ||
254 | field story about their work, to make their research more accessible. | 245 | field story about their work, to make their research more accessible. | ||
255 | We decided to do something a little different for our work on applying | 246 | We decided to do something a little different for our work on applying | ||
256 | machine learning to detecting and tracking beluga whales: we are | 247 | machine learning to detecting and tracking beluga whales: we are | ||
257 | presenting it as a comic-book style video!", | 248 | presenting it as a comic-book style video!", | ||
258 | "format": "", | 249 | "format": "", | ||
259 | "hash": "", | 250 | "hash": "", | ||
260 | "id": "1cd6dbae-5c9d-440d-b29b-26c84fbc5a7c", | 251 | "id": "1cd6dbae-5c9d-440d-b29b-26c84fbc5a7c", | ||
261 | "last_modified": null, | 252 | "last_modified": null, | ||
262 | "metadata_modified": "2023-05-18T21:05:56.975546", | 253 | "metadata_modified": "2023-05-18T21:05:56.975546", | ||
263 | "mimetype": null, | 254 | "mimetype": null, | ||
264 | "mimetype_inner": null, | 255 | "mimetype_inner": null, | ||
265 | "name": "One Beluga, Two Beluga, Three Beluga, Four: How to | 256 | "name": "One Beluga, Two Beluga, Three Beluga, Four: How to | ||
266 | Count Belugas When You Run Out of Fingers and Toes", | 257 | Count Belugas When You Run Out of Fingers and Toes", | ||
267 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 258 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
268 | "position": 1, | 259 | "position": 1, | ||
269 | "resCategory": "supplemental", | 260 | "resCategory": "supplemental", | ||
270 | "resource_type": null, | 261 | "resource_type": null, | ||
271 | "size": null, | 262 | "size": null, | ||
272 | "state": "active", | 263 | "state": "active", | ||
273 | "url": | 264 | "url": | ||
274 | n/beluga-graphic-novel/resource/58aed159-4a62-4c2b-9978-967ad5f356a6", | 265 | n/beluga-graphic-novel/resource/58aed159-4a62-4c2b-9978-967ad5f356a6", | ||
275 | "url_type": null | 266 | "url_type": null | ||
276 | } | 267 | } | ||
277 | ], | 268 | ], | ||
278 | "rightsIdentifier": "CC-BY-4.0", | 269 | "rightsIdentifier": "CC-BY-4.0", | ||
279 | "rightsIdentifierScheme": "SPDX", | 270 | "rightsIdentifierScheme": "SPDX", | ||
280 | "rightsSchemeURI": "https://spdx.org/licenses", | 271 | "rightsSchemeURI": "https://spdx.org/licenses", | ||
281 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | 272 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | ||
n | 282 | "schemeURI": "", | n | 273 | "schemeURI": |
274 | "https://www.polardata.ca/pdcinput/public/keywordlibrary", | ||||
283 | "state": "active", | 275 | "state": "active", | ||
t | 284 | "subjectScheme": "", | t | 276 | "subjectScheme": "Polar Data Catalogue", |
285 | "tags": [ | 277 | "tags": [ | ||
286 | { | 278 | { | ||
287 | "display_name": "Beluga", | 279 | "display_name": "Beluga", | ||
288 | "id": "a9f25a89-b0ef-4d4d-993d-73f28e0d702a", | 280 | "id": "a9f25a89-b0ef-4d4d-993d-73f28e0d702a", | ||
289 | "name": "Beluga", | 281 | "name": "Beluga", | ||
290 | "state": "active", | 282 | "state": "active", | ||
291 | "vocabulary_id": null | 283 | "vocabulary_id": null | ||
292 | }, | 284 | }, | ||
293 | { | 285 | { | ||
294 | "display_name": "Unmanned Aerial Vehicle", | 286 | "display_name": "Unmanned Aerial Vehicle", | ||
295 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | 287 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | ||
296 | "name": "Unmanned Aerial Vehicle", | 288 | "name": "Unmanned Aerial Vehicle", | ||
297 | "state": "active", | 289 | "state": "active", | ||
298 | "vocabulary_id": null | 290 | "vocabulary_id": null | ||
299 | }, | 291 | }, | ||
300 | { | 292 | { | ||
301 | "display_name": "computer vision", | 293 | "display_name": "computer vision", | ||
302 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | 294 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | ||
303 | "name": "computer vision", | 295 | "name": "computer vision", | ||
304 | "state": "active", | 296 | "state": "active", | ||
305 | "vocabulary_id": null | 297 | "vocabulary_id": null | ||
306 | }, | 298 | }, | ||
307 | { | 299 | { | ||
308 | "display_name": "deep learning", | 300 | "display_name": "deep learning", | ||
309 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | 301 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | ||
310 | "name": "deep learning", | 302 | "name": "deep learning", | ||
311 | "state": "active", | 303 | "state": "active", | ||
312 | "vocabulary_id": null | 304 | "vocabulary_id": null | ||
313 | }, | 305 | }, | ||
314 | { | 306 | { | ||
315 | "display_name": "object detection", | 307 | "display_name": "object detection", | ||
316 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | 308 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | ||
317 | "name": "object detection", | 309 | "name": "object detection", | ||
318 | "state": "active", | 310 | "state": "active", | ||
319 | "vocabulary_id": null | 311 | "vocabulary_id": null | ||
320 | }, | 312 | }, | ||
321 | { | 313 | { | ||
322 | "display_name": "object tracking", | 314 | "display_name": "object tracking", | ||
323 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | 315 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | ||
324 | "name": "object tracking", | 316 | "name": "object tracking", | ||
325 | "state": "active", | 317 | "state": "active", | ||
326 | "vocabulary_id": null | 318 | "vocabulary_id": null | ||
327 | } | 319 | } | ||
328 | ], | 320 | ], | ||
329 | "theme": [ | 321 | "theme": [ | ||
330 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | 322 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | ||
331 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | 323 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | ||
332 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | 324 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | ||
333 | ], | 325 | ], | ||
334 | "title": "Detection and tracking of belugas, kayaks and motorized | 326 | "title": "Detection and tracking of belugas, kayaks and motorized | ||
335 | boats in drone video using deep learning", | 327 | boats in drone video using deep learning", | ||
336 | "type": "publication", | 328 | "type": "publication", | ||
337 | "url": null, | 329 | "url": null, | ||
338 | "version": null | 330 | "version": null | ||
339 | } | 331 | } |