Changes
On May 18, 2023 at 4:05:57 PM CDT, Claire Herbert:
-
Changed value of field
related_datasets
to ['b5f259b4-3ace-4750-bfb0-47c4e794082f']
in Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning -
Changed value of field
related_programs
to []
in Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning -
Added the following fields to Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning
- funderSchemeURI
- keywords with value Beluga,Unmanned Aerial Vehicle,computer vision,deep learning,object detection,object tracking
- awardURI with value https://www.cerc.gc.ca/
- funderName
- grantNumber
- funderIdentifierType
- awardTitle with value The Canada Excellence Research Chair (CERC) and the Canada Research Chair (CRC programs)
- funderIdentifier
-
Added resource One Beluga, Two Beluga, Three Beluga, Four: How to Count Belugas When You Run Out of Fingers and Toes to Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning
f | 1 | { | f | 1 | { |
2 | "Author": [ | 2 | "Author": [ | ||
3 | { | 3 | { | ||
4 | "affiliation": "Centre for Earth Observation Science - | 4 | "affiliation": "Centre for Earth Observation Science - | ||
5 | University of Manitoba", | 5 | University of Manitoba", | ||
6 | "creatorName": "Harasyn, Madison", | 6 | "creatorName": "Harasyn, Madison", | ||
7 | "email": "Madison.harasyn@umanitoba.ca", | 7 | "email": "Madison.harasyn@umanitoba.ca", | ||
8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | 8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | ||
9 | "nameIdentifierScheme": "ORCID", | 9 | "nameIdentifierScheme": "ORCID", | ||
10 | "nameType": "Personal", | 10 | "nameType": "Personal", | ||
11 | "schemeURI": "http://orcid.org/" | 11 | "schemeURI": "http://orcid.org/" | ||
12 | }, | 12 | }, | ||
13 | { | 13 | { | ||
14 | "affiliation": "Centre for Earth Observation Science - | 14 | "affiliation": "Centre for Earth Observation Science - | ||
15 | University of Manitoba", | 15 | University of Manitoba", | ||
16 | "creatorName": "Chan, Wayne", | 16 | "creatorName": "Chan, Wayne", | ||
17 | "email": "wayne.chan@umanitoba.ca", | 17 | "email": "wayne.chan@umanitoba.ca", | ||
18 | "nameIdentifier": "", | 18 | "nameIdentifier": "", | ||
19 | "nameType": "Personal" | 19 | "nameType": "Personal" | ||
20 | }, | 20 | }, | ||
21 | { | 21 | { | ||
22 | "affiliation": "Centre for Earth Observation Science - | 22 | "affiliation": "Centre for Earth Observation Science - | ||
23 | University of Manitoba", | 23 | University of Manitoba", | ||
24 | "creatorName": "Ausen, Emma", | 24 | "creatorName": "Ausen, Emma", | ||
25 | "email": "emma.ausen@umanitoba.ca", | 25 | "email": "emma.ausen@umanitoba.ca", | ||
26 | "nameIdentifier": "", | 26 | "nameIdentifier": "", | ||
27 | "nameType": "Personal" | 27 | "nameType": "Personal" | ||
28 | }, | 28 | }, | ||
29 | { | 29 | { | ||
30 | "affiliation": "Centre for Earth Observation Science - | 30 | "affiliation": "Centre for Earth Observation Science - | ||
31 | University of Manitoba", | 31 | University of Manitoba", | ||
32 | "creatorName": "Barber, David", | 32 | "creatorName": "Barber, David", | ||
33 | "email": "david.barber@umanitoba.ca", | 33 | "email": "david.barber@umanitoba.ca", | ||
34 | "nameIdentifier": "0000-0001-9466-3291", | 34 | "nameIdentifier": "0000-0001-9466-3291", | ||
35 | "nameIdentifierScheme": "ORCID", | 35 | "nameIdentifierScheme": "ORCID", | ||
36 | "nameType": "Personal", | 36 | "nameType": "Personal", | ||
37 | "schemeURI": "http://orcid.org/" | 37 | "schemeURI": "http://orcid.org/" | ||
38 | } | 38 | } | ||
39 | ], | 39 | ], | ||
40 | "Identifier": "10.1139/juvs-2021-0024", | 40 | "Identifier": "10.1139/juvs-2021-0024", | ||
41 | "PublicationYear": "2022", | 41 | "PublicationYear": "2022", | ||
42 | "Publisher": "Drone Systems and Applications", | 42 | "Publisher": "Drone Systems and Applications", | ||
43 | "ResourceType": "journal article", | 43 | "ResourceType": "journal article", | ||
44 | "Rights": "Creative Commons Attribution 4.0 International", | 44 | "Rights": "Creative Commons Attribution 4.0 International", | ||
45 | "Version": "1.0", | 45 | "Version": "1.0", | ||
46 | "author": null, | 46 | "author": null, | ||
47 | "author_email": null, | 47 | "author_email": null, | ||
n | 48 | "awardTitle": "The Canada Excellence Research Chair (CERC) and the | n | ||
49 | Canada Research Chair (CRC programs)", | ||||
50 | "awardURI": "https://www.cerc.gc.ca/", | ||||
51 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | 48 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | ||
52 | David G.Barber. Detection and tracking of belugas, kayaks and | 49 | David G.Barber. Detection and tracking of belugas, kayaks and | ||
53 | motorized boats in drone video using deep learning. Drone Systems and | 50 | motorized boats in drone video using deep learning. Drone Systems and | ||
54 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | 51 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | ||
55 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | 52 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | ||
56 | "descriptionType": "Abstract", | 53 | "descriptionType": "Abstract", | ||
n | 57 | "funderIdentifier": "", | n | 54 | "extras": [ |
55 | { | ||||
56 | "key": "awardTitle", | ||||
57 | "value": "The Canada Excellence Research Chair (CERC) and the | ||||
58 | Canada Research Chair (CRC programs)" | ||||
59 | }, | ||||
60 | { | ||||
61 | "key": "awardURI", | ||||
62 | "value": "https://www.cerc.gc.ca/" | ||||
63 | }, | ||||
64 | { | ||||
65 | "key": "funderIdentifier", | ||||
66 | "value": "" | ||||
67 | }, | ||||
68 | { | ||||
58 | "funderIdentifierType": "", | 69 | "key": "funderIdentifierType", | ||
59 | "funderName": "", | 70 | "value": "" | ||
60 | "funderSchemeURI": "", | 71 | }, | ||
61 | "grantNumber": "", | 72 | { | ||
73 | "key": "funderName", | ||||
74 | "value": "" | ||||
75 | }, | ||||
76 | { | ||||
77 | "key": "funderSchemeURI", | ||||
78 | "value": "" | ||||
79 | }, | ||||
80 | { | ||||
81 | "key": "grantNumber", | ||||
82 | "value": "" | ||||
83 | }, | ||||
84 | { | ||||
85 | "key": "keywords", | ||||
86 | "value": "Beluga,Unmanned Aerial Vehicle,computer vision,deep | ||||
87 | learning,object detection,object tracking" | ||||
88 | } | ||||
89 | ], | ||||
62 | "groups": [ | 90 | "groups": [ | ||
n | n | 91 | { | ||
92 | "description": "Features and characteristics of salt water | ||||
93 | bodies.\r\n\r\nIn CEOS, related research themes include | ||||
94 | biogeochemistry, modelling, marine mammals, oil spill response, | ||||
95 | physical oceanography, remote sensing and technology and trace metals | ||||
96 | and contaminants", | ||||
97 | "display_name": "Marine", | ||||
98 | "id": "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | ||||
99 | "image_display_url": | ||||
100 | ata/uploads/group/2021-10-31-211516.365746ofinspireoceanographic.svg", | ||||
101 | "name": "marine", | ||||
102 | "title": "Marine" | ||||
103 | }, | ||||
63 | { | 104 | { | ||
64 | "description": "Image: \"Earth from Space\" by NASA Goddard | 105 | "description": "Image: \"Earth from Space\" by NASA Goddard | ||
65 | Photo and Video is licensed under CC BY 2.0", | 106 | Photo and Video is licensed under CC BY 2.0", | ||
66 | "display_name": "Remote Sensing", | 107 | "display_name": "Remote Sensing", | ||
67 | "id": "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9", | 108 | "id": "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9", | ||
68 | "image_display_url": | 109 | "image_display_url": | ||
69 | anitoba.ca/data/uploads/group/2022-02-05-222621.346712earthimage.jpg", | 110 | anitoba.ca/data/uploads/group/2022-02-05-222621.346712earthimage.jpg", | ||
70 | "name": "remote-sensing", | 111 | "name": "remote-sensing", | ||
71 | "title": "Remote Sensing" | 112 | "title": "Remote Sensing" | ||
72 | } | 113 | } | ||
73 | ], | 114 | ], | ||
74 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 115 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
75 | "isopen": false, | 116 | "isopen": false, | ||
n | 76 | "keywords": "Beluga,Unmanned Aerial Vehicle,computer vision,deep | n | ||
77 | learning,object detection,object tracking", | ||||
78 | "language": "English", | 117 | "language": "English", | ||
79 | "licenceType": "Open", | 118 | "licenceType": "Open", | ||
80 | "license_id": null, | 119 | "license_id": null, | ||
81 | "license_title": null, | 120 | "license_title": null, | ||
82 | "maintainer": null, | 121 | "maintainer": null, | ||
83 | "maintainer_email": null, | 122 | "maintainer_email": null, | ||
84 | "metadata_created": "2022-04-07T19:45:13.021227", | 123 | "metadata_created": "2022-04-07T19:45:13.021227", | ||
n | 85 | "metadata_modified": "2022-11-02T19:46:29.787030", | n | 124 | "metadata_modified": "2023-05-18T21:05:56.950941", |
86 | "name": "detect-video-deep-learning", | 125 | "name": "detect-video-deep-learning", | ||
87 | "notes": "Aerial imagery surveys are commonly used in marine mammal | 126 | "notes": "Aerial imagery surveys are commonly used in marine mammal | ||
88 | research to determine population size, distribution and habitat use. | 127 | research to determine population size, distribution and habitat use. | ||
89 | Analysis of aerial photos involves hours of manually identifying | 128 | Analysis of aerial photos involves hours of manually identifying | ||
90 | individuals present in each image and converting raw counts into | 129 | individuals present in each image and converting raw counts into | ||
91 | useable biological statistics. Our research proposes the use of deep | 130 | useable biological statistics. Our research proposes the use of deep | ||
92 | learning algorithms to increase the efficiency of the marine mammal | 131 | learning algorithms to increase the efficiency of the marine mammal | ||
93 | research workflow. To test the feasibility of this proposal, the | 132 | research workflow. To test the feasibility of this proposal, the | ||
94 | existing YOLOv4 convolutional neural network model was trained to | 133 | existing YOLOv4 convolutional neural network model was trained to | ||
95 | detect belugas, kayaks and motorized boats in oblique drone imagery, | 134 | detect belugas, kayaks and motorized boats in oblique drone imagery, | ||
96 | collected from a stationary tethered system. Automated computer-based | 135 | collected from a stationary tethered system. Automated computer-based | ||
97 | object detection achieved the following precision and recall, | 136 | object detection achieved the following precision and recall, | ||
98 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | 137 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | ||
99 | kayak = 96%/96%. We then tested the performance of computer vision | 138 | kayak = 96%/96%. We then tested the performance of computer vision | ||
100 | tracking of belugas and occupied watercraft in drone videos using the | 139 | tracking of belugas and occupied watercraft in drone videos using the | ||
101 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | 140 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | ||
102 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | 141 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | ||
103 | precision (MOTP) between 63% and 86%. Results from this research | 142 | precision (MOTP) between 63% and 86%. Results from this research | ||
104 | indicate that deep learning technology can detect and track features | 143 | indicate that deep learning technology can detect and track features | ||
105 | more consistently than human annotators, allowing for larger datasets | 144 | more consistently than human annotators, allowing for larger datasets | ||
106 | to be processed within a fraction of the time while avoiding | 145 | to be processed within a fraction of the time while avoiding | ||
107 | discrepancies introduced by labeling fatigue or multiple human | 146 | discrepancies introduced by labeling fatigue or multiple human | ||
108 | annotators.", | 147 | annotators.", | ||
n | 109 | "num_resources": 1, | n | 148 | "num_resources": 2, |
110 | "num_tags": 6, | 149 | "num_tags": 6, | ||
111 | "organization": { | 150 | "organization": { | ||
112 | "approval_status": "approved", | 151 | "approval_status": "approved", | ||
113 | "created": "2017-07-21T13:15:49.935872", | 152 | "created": "2017-07-21T13:15:49.935872", | ||
114 | "description": "The Centre for Earth Observation Science (CEOS) | 153 | "description": "The Centre for Earth Observation Science (CEOS) | ||
115 | was established in 1994 with a mandate to research, preserve and | 154 | was established in 1994 with a mandate to research, preserve and | ||
116 | communicate knowledge of Earth system processes using the technologies | 155 | communicate knowledge of Earth system processes using the technologies | ||
117 | of Earth Observation Science. Research is multidisciplinary and | 156 | of Earth Observation Science. Research is multidisciplinary and | ||
118 | collaborative seeking to understand the complex interrelationships | 157 | collaborative seeking to understand the complex interrelationships | ||
119 | between elements of Earth systems, and how these systems will likely | 158 | between elements of Earth systems, and how these systems will likely | ||
120 | respond to climate change. Although researchers have worked in many | 159 | respond to climate change. Although researchers have worked in many | ||
121 | regions, the Arctic marine system has always been a unifying focus of | 160 | regions, the Arctic marine system has always been a unifying focus of | ||
122 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | 161 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | ||
123 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | 162 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | ||
124 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | 163 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | ||
125 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | 164 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | ||
126 | existing research activity are divided among key themes:\r\n\r\nArctic | 165 | existing research activity are divided among key themes:\r\n\r\nArctic | ||
127 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | 166 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | ||
128 | preservation, archaeo-geophysics, permafrost degredation, lithic | 167 | preservation, archaeo-geophysics, permafrost degredation, lithic | ||
129 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | 168 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | ||
130 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | 169 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | ||
131 | ethnographic analogy, traditional knowledge, climate change and | 170 | ethnographic analogy, traditional knowledge, climate change and | ||
132 | northern heritage resource management.\r\n\r\nAtmospheric | 171 | northern heritage resource management.\r\n\r\nAtmospheric | ||
133 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | 172 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | ||
134 | extreme weather, circulation, eddy correlations, polar vortex, | 173 | extreme weather, circulation, eddy correlations, polar vortex, | ||
135 | climate, teleconnections, geophysical fluid dynamics, flux and energy | 174 | climate, teleconnections, geophysical fluid dynamics, flux and energy | ||
136 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | 175 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | ||
137 | albedo feedback, cloud radiative forcing, pCO2. | 176 | albedo feedback, cloud radiative forcing, pCO2. | ||
138 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | 177 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | ||
139 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | 178 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | ||
140 | metals, minerals, hydrocarbons, brine processes, otolith | 179 | metals, minerals, hydrocarbons, brine processes, otolith | ||
141 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | 180 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | ||
142 | trace metals, PAHs, source, transport, transformation, pathways, | 181 | trace metals, PAHs, source, transport, transformation, pathways, | ||
143 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | 182 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | ||
144 | Observation Science: Active and passive microwave, LiDAR, EM | 183 | Observation Science: Active and passive microwave, LiDAR, EM | ||
145 | induction, spatial-temporal analysis, forward and inverse scattering | 184 | induction, spatial-temporal analysis, forward and inverse scattering | ||
146 | models, complex permittivity, ocean colour, ocean surface roughness, | 185 | models, complex permittivity, ocean colour, ocean surface roughness, | ||
147 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | 186 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | ||
148 | Biophysical processes, primary production; ice algae, ice | 187 | Biophysical processes, primary production; ice algae, ice | ||
149 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | 188 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | ||
150 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | 189 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | ||
151 | transport, nutrient transport, contaminants, landscape processes, | 190 | transport, nutrient transport, contaminants, landscape processes, | ||
152 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | 191 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | ||
153 | whales, habitat, conservation, satellite telemetry, distribution, | 192 | whales, habitat, conservation, satellite telemetry, distribution, | ||
154 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | 193 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | ||
155 | Simulation of sea ice and oceanic regional processes, Nucleus for | 194 | Simulation of sea ice and oceanic regional processes, Nucleus for | ||
156 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | 195 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | ||
157 | interactions, hind cast simulations and projections for sea ice state | 196 | interactions, hind cast simulations and projections for sea ice state | ||
158 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | 197 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | ||
159 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | 198 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | ||
160 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | 199 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | ||
161 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | 200 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | ||
162 | processes, extreme ice features and hazards, snow, ridges, | 201 | processes, extreme ice features and hazards, snow, ridges, | ||
163 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | 202 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | ||
164 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | 203 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | ||
165 | hunting, food security, sea ice use, community-based research, | 204 | hunting, food security, sea ice use, community-based research, | ||
166 | community-based monitoring, two ways of knowing.", | 205 | community-based monitoring, two ways of knowing.", | ||
167 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 206 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
168 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | 207 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | ||
169 | "is_organization": true, | 208 | "is_organization": true, | ||
170 | "name": "ceos", | 209 | "name": "ceos", | ||
171 | "state": "active", | 210 | "state": "active", | ||
172 | "title": "Centre for Earth Observation Science", | 211 | "title": "Centre for Earth Observation Science", | ||
173 | "type": "organization" | 212 | "type": "organization" | ||
174 | }, | 213 | }, | ||
175 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 214 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
176 | "private": false, | 215 | "private": false, | ||
n | 177 | "related_datasets": "[\"b5f259b4-3ace-4750-bfb0-47c4e794082f\"]", | n | 216 | "related_datasets": [ |
217 | "b5f259b4-3ace-4750-bfb0-47c4e794082f" | ||||
218 | ], | ||||
178 | "related_programs": "[]", | 219 | "related_programs": [], | ||
179 | "relationships_as_object": [], | 220 | "relationships_as_object": [], | ||
180 | "relationships_as_subject": [], | 221 | "relationships_as_subject": [], | ||
181 | "resources": [ | 222 | "resources": [ | ||
182 | { | 223 | { | ||
183 | "cache_last_updated": null, | 224 | "cache_last_updated": null, | ||
184 | "cache_url": null, | 225 | "cache_url": null, | ||
185 | "created": "2022-04-07T19:49:13.974750", | 226 | "created": "2022-04-07T19:49:13.974750", | ||
186 | "datastore_active": false, | 227 | "datastore_active": false, | ||
187 | "datastore_contains_all_records_of_source_file": false, | 228 | "datastore_contains_all_records_of_source_file": false, | ||
188 | "description": "Churchill Beluga Boat Drone Imagery related | 229 | "description": "Churchill Beluga Boat Drone Imagery related | ||
189 | journal article published in Drone Systems and Applications.\r\nDOI: | 230 | journal article published in Drone Systems and Applications.\r\nDOI: | ||
190 | https://doi.org/10.1139/juvs-2021-0024", | 231 | https://doi.org/10.1139/juvs-2021-0024", | ||
191 | "format": "PDF", | 232 | "format": "PDF", | ||
192 | "hash": "", | 233 | "hash": "", | ||
193 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | 234 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | ||
194 | "last_modified": "2022-04-07T20:02:20.594051", | 235 | "last_modified": "2022-04-07T20:02:20.594051", | ||
n | 195 | "metadata_modified": "2022-04-07T20:02:20.619290", | n | 236 | "metadata_modified": "2023-05-18T21:05:56.975275", |
196 | "mimetype": "application/pdf", | 237 | "mimetype": "application/pdf", | ||
197 | "mimetype_inner": null, | 238 | "mimetype_inner": null, | ||
198 | "name": "Detection and tracking of belugas, kayaks and motorized | 239 | "name": "Detection and tracking of belugas, kayaks and motorized | ||
199 | boats in drone video using deep learning", | 240 | boats in drone video using deep learning", | ||
200 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 241 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
201 | "position": 0, | 242 | "position": 0, | ||
202 | "resCategory": "supplemental", | 243 | "resCategory": "supplemental", | ||
203 | "resource_type": null, | 244 | "resource_type": null, | ||
204 | "size": 4104522, | 245 | "size": 4104522, | ||
205 | "state": "active", | 246 | "state": "active", | ||
206 | "url": | 247 | "url": | ||
207 | rce/5bcbb0bc-425b-4fad-b7ff-4c8599043dcf/download/juvs-2021-0024.pdf", | 248 | rce/5bcbb0bc-425b-4fad-b7ff-4c8599043dcf/download/juvs-2021-0024.pdf", | ||
208 | "url_type": "upload" | 249 | "url_type": "upload" | ||
t | t | 250 | }, | ||
251 | { | ||||
252 | "cache_last_updated": null, | ||||
253 | "cache_url": null, | ||||
254 | "created": "2023-05-18T21:05:57.006643", | ||||
255 | "datastore_active": false, | ||||
256 | "datastore_contains_all_records_of_source_file": false, | ||||
257 | "description": "Researchers at CEOS are often asked to write a | ||||
258 | field story about their work, to make their research more accessible. | ||||
259 | We decided to do something a little different for our work on applying | ||||
260 | machine learning to detecting and tracking beluga whales: we are | ||||
261 | presenting it as a comic-book style video!", | ||||
262 | "format": "", | ||||
263 | "hash": "", | ||||
264 | "id": "1cd6dbae-5c9d-440d-b29b-26c84fbc5a7c", | ||||
265 | "last_modified": null, | ||||
266 | "metadata_modified": "2023-05-18T21:05:56.975546", | ||||
267 | "mimetype": null, | ||||
268 | "mimetype_inner": null, | ||||
269 | "name": "One Beluga, Two Beluga, Three Beluga, Four: How to | ||||
270 | Count Belugas When You Run Out of Fingers and Toes", | ||||
271 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||||
272 | "position": 1, | ||||
273 | "resCategory": "supplemental", | ||||
274 | "resource_type": null, | ||||
275 | "size": null, | ||||
276 | "state": "active", | ||||
277 | "url": | ||||
278 | n/beluga-graphic-novel/resource/58aed159-4a62-4c2b-9978-967ad5f356a6", | ||||
279 | "url_type": null | ||||
209 | } | 280 | } | ||
210 | ], | 281 | ], | ||
211 | "rightsIdentifier": "CC-BY-4.0", | 282 | "rightsIdentifier": "CC-BY-4.0", | ||
212 | "rightsIdentifierScheme": "SPDX", | 283 | "rightsIdentifierScheme": "SPDX", | ||
213 | "rightsSchemeURI": "https://spdx.org/licenses", | 284 | "rightsSchemeURI": "https://spdx.org/licenses", | ||
214 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | 285 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | ||
215 | "schemeURI": "", | 286 | "schemeURI": "", | ||
216 | "state": "active", | 287 | "state": "active", | ||
217 | "subjectScheme": "", | 288 | "subjectScheme": "", | ||
218 | "tags": [ | 289 | "tags": [ | ||
219 | { | 290 | { | ||
220 | "display_name": "Beluga", | 291 | "display_name": "Beluga", | ||
221 | "id": "a9f25a89-b0ef-4d4d-993d-73f28e0d702a", | 292 | "id": "a9f25a89-b0ef-4d4d-993d-73f28e0d702a", | ||
222 | "name": "Beluga", | 293 | "name": "Beluga", | ||
223 | "state": "active", | 294 | "state": "active", | ||
224 | "vocabulary_id": null | 295 | "vocabulary_id": null | ||
225 | }, | 296 | }, | ||
226 | { | 297 | { | ||
227 | "display_name": "Unmanned Aerial Vehicle", | 298 | "display_name": "Unmanned Aerial Vehicle", | ||
228 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | 299 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | ||
229 | "name": "Unmanned Aerial Vehicle", | 300 | "name": "Unmanned Aerial Vehicle", | ||
230 | "state": "active", | 301 | "state": "active", | ||
231 | "vocabulary_id": null | 302 | "vocabulary_id": null | ||
232 | }, | 303 | }, | ||
233 | { | 304 | { | ||
234 | "display_name": "computer vision", | 305 | "display_name": "computer vision", | ||
235 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | 306 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | ||
236 | "name": "computer vision", | 307 | "name": "computer vision", | ||
237 | "state": "active", | 308 | "state": "active", | ||
238 | "vocabulary_id": null | 309 | "vocabulary_id": null | ||
239 | }, | 310 | }, | ||
240 | { | 311 | { | ||
241 | "display_name": "deep learning", | 312 | "display_name": "deep learning", | ||
242 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | 313 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | ||
243 | "name": "deep learning", | 314 | "name": "deep learning", | ||
244 | "state": "active", | 315 | "state": "active", | ||
245 | "vocabulary_id": null | 316 | "vocabulary_id": null | ||
246 | }, | 317 | }, | ||
247 | { | 318 | { | ||
248 | "display_name": "object detection", | 319 | "display_name": "object detection", | ||
249 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | 320 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | ||
250 | "name": "object detection", | 321 | "name": "object detection", | ||
251 | "state": "active", | 322 | "state": "active", | ||
252 | "vocabulary_id": null | 323 | "vocabulary_id": null | ||
253 | }, | 324 | }, | ||
254 | { | 325 | { | ||
255 | "display_name": "object tracking", | 326 | "display_name": "object tracking", | ||
256 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | 327 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | ||
257 | "name": "object tracking", | 328 | "name": "object tracking", | ||
258 | "state": "active", | 329 | "state": "active", | ||
259 | "vocabulary_id": null | 330 | "vocabulary_id": null | ||
260 | } | 331 | } | ||
261 | ], | 332 | ], | ||
262 | "theme": [ | 333 | "theme": [ | ||
263 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | 334 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | ||
264 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | 335 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | ||
265 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | 336 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | ||
266 | ], | 337 | ], | ||
267 | "title": "Detection and tracking of belugas, kayaks and motorized | 338 | "title": "Detection and tracking of belugas, kayaks and motorized | ||
268 | boats in drone video using deep learning", | 339 | boats in drone video using deep learning", | ||
269 | "type": "publication", | 340 | "type": "publication", | ||
270 | "url": null, | 341 | "url": null, | ||
271 | "version": null | 342 | "version": null | ||
272 | } | 343 | } |