Changes
On April 7, 2022 at 2:51:12 PM CDT, Casey Clair:
-
Changed value of field
related_datasets
to ["ff83795d-4140-4b01-aba8-b83c66c70c9a", "b5f259b4-3ace-4750-bfb0-47c4e794082f"]
in Detection and tracking of belugas, kayaks and motorized boats in drone video using deep learning
f | 1 | { | f | 1 | { |
2 | "Author": [ | 2 | "Author": [ | ||
3 | { | 3 | { | ||
4 | "affiliation": "Centre for Earth Observation Science - | 4 | "affiliation": "Centre for Earth Observation Science - | ||
5 | University of Manitoba", | 5 | University of Manitoba", | ||
6 | "creatorName": "Harasyn, Madison", | 6 | "creatorName": "Harasyn, Madison", | ||
7 | "email": "Madison.harasyn@umanitoba.ca", | 7 | "email": "Madison.harasyn@umanitoba.ca", | ||
8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | 8 | "nameIdentifier": "https://orcid.org/0000-0002-5741-6766", | ||
9 | "nameIdentifierScheme": "ORCID", | 9 | "nameIdentifierScheme": "ORCID", | ||
10 | "nameType": "Personal", | 10 | "nameType": "Personal", | ||
11 | "schemeURI": "http://orcid.org/" | 11 | "schemeURI": "http://orcid.org/" | ||
12 | }, | 12 | }, | ||
13 | { | 13 | { | ||
14 | "affiliation": "Centre for Earth Observation Science - | 14 | "affiliation": "Centre for Earth Observation Science - | ||
15 | University of Manitoba", | 15 | University of Manitoba", | ||
16 | "creatorName": "Chan, Wayne", | 16 | "creatorName": "Chan, Wayne", | ||
17 | "email": "wayne.chan@umanitoba.ca", | 17 | "email": "wayne.chan@umanitoba.ca", | ||
18 | "nameIdentifier": "", | 18 | "nameIdentifier": "", | ||
19 | "nameType": "Personal" | 19 | "nameType": "Personal" | ||
20 | }, | 20 | }, | ||
21 | { | 21 | { | ||
22 | "affiliation": "Centre for Earth Observation Science - | 22 | "affiliation": "Centre for Earth Observation Science - | ||
23 | University of Manitoba", | 23 | University of Manitoba", | ||
24 | "creatorName": "Ausen, Emma", | 24 | "creatorName": "Ausen, Emma", | ||
25 | "email": "emma.ausen@umanitoba.ca", | 25 | "email": "emma.ausen@umanitoba.ca", | ||
26 | "nameIdentifier": "", | 26 | "nameIdentifier": "", | ||
27 | "nameType": "Personal" | 27 | "nameType": "Personal" | ||
28 | }, | 28 | }, | ||
29 | { | 29 | { | ||
30 | "affiliation": "Centre for Earth Observation Science - | 30 | "affiliation": "Centre for Earth Observation Science - | ||
31 | University of Manitoba", | 31 | University of Manitoba", | ||
32 | "creatorName": "Barber, David", | 32 | "creatorName": "Barber, David", | ||
33 | "email": "david.barber@umanitoba.ca", | 33 | "email": "david.barber@umanitoba.ca", | ||
34 | "nameIdentifier": "0000-0001-9466-3291", | 34 | "nameIdentifier": "0000-0001-9466-3291", | ||
35 | "nameIdentifierScheme": "ORCID", | 35 | "nameIdentifierScheme": "ORCID", | ||
36 | "nameType": "Personal", | 36 | "nameType": "Personal", | ||
37 | "schemeURI": "http://orcid.org/" | 37 | "schemeURI": "http://orcid.org/" | ||
38 | } | 38 | } | ||
39 | ], | 39 | ], | ||
40 | "Identifier": "10.1139/juvs-2021-0024", | 40 | "Identifier": "10.1139/juvs-2021-0024", | ||
41 | "PublicationYear": "2022", | 41 | "PublicationYear": "2022", | ||
42 | "Publisher": "Drone Systems and Applications", | 42 | "Publisher": "Drone Systems and Applications", | ||
43 | "ResourceType": "journal article", | 43 | "ResourceType": "journal article", | ||
44 | "Rights": "Creative Commons Attribution 4.0 International", | 44 | "Rights": "Creative Commons Attribution 4.0 International", | ||
45 | "Version": "1.0", | 45 | "Version": "1.0", | ||
46 | "author": null, | 46 | "author": null, | ||
47 | "author_email": null, | 47 | "author_email": null, | ||
48 | "awardTitle": "The Canada Excellence Research Chair (CERC) and the | 48 | "awardTitle": "The Canada Excellence Research Chair (CERC) and the | ||
49 | Canada Research Chair (CRC programs)", | 49 | Canada Research Chair (CRC programs)", | ||
50 | "awardURI": "https://www.cerc.gc.ca/", | 50 | "awardURI": "https://www.cerc.gc.ca/", | ||
51 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | 51 | "citation": "Madison L.Harasyn, Wayne S.Chan, Emma L.Ausen, and | ||
52 | David G.Barber. Detection and tracking of belugas, kayaks and | 52 | David G.Barber. Detection and tracking of belugas, kayaks and | ||
53 | motorized boats in drone video using deep learning. Drone Systems and | 53 | motorized boats in drone video using deep learning. Drone Systems and | ||
54 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | 54 | Applications. 10(1): 77-96. https://doi.org/10.1139/juvs-2021-0024", | ||
55 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | 55 | "creator_user_id": "cde7b848-a882-4fc7-97c9-670417bd6b43", | ||
56 | "descriptionType": "Abstract", | 56 | "descriptionType": "Abstract", | ||
57 | "funderIdentifier": "", | 57 | "funderIdentifier": "", | ||
58 | "funderIdentifierType": "", | 58 | "funderIdentifierType": "", | ||
59 | "funderName": "", | 59 | "funderName": "", | ||
60 | "funderSchemeURI": "", | 60 | "funderSchemeURI": "", | ||
61 | "grantNumber": "", | 61 | "grantNumber": "", | ||
n | 62 | "groups": [], | n | 62 | "groups": [ |
63 | { | ||||
64 | "description": "Inland water features, drainage systems and | ||||
65 | their characteristics. Examples of data you can find here include | ||||
66 | river and lake data, water quality data. \r\n\r\nIn CEOS, related | ||||
67 | research themes include biogeochemistry, Inland lakes and waters, | ||||
68 | modelling, remote sensing and technology, trace metals and | ||||
69 | contaminants.", | ||||
70 | "display_name": "Freshwater", | ||||
71 | "id": "8f8cd877-b037-4b1a-b928-f86d9e093741", | ||||
72 | "image_display_url": | ||||
73 | /data/uploads/group/2021-10-31-211937.658599hyinspirehydrography.svg", | ||||
74 | "name": "freshwater", | ||||
75 | "title": "Freshwater" | ||||
76 | }, | ||||
77 | { | ||||
78 | "description": "Features and characteristics of salt water | ||||
79 | bodies.\r\n\r\nIn CEOS, related research themes include | ||||
80 | biogeochemistry, modelling, marine mammals, oil spill response, | ||||
81 | physical oceanography, remote sensing and technology and trace metals | ||||
82 | and contaminants", | ||||
83 | "display_name": "Marine", | ||||
84 | "id": "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | ||||
85 | "image_display_url": | ||||
86 | ata/uploads/group/2021-10-31-211516.365746ofinspireoceanographic.svg", | ||||
87 | "name": "marine", | ||||
88 | "title": "Marine" | ||||
89 | } | ||||
90 | ], | ||||
63 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 91 | "id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
64 | "isopen": false, | 92 | "isopen": false, | ||
65 | "keywords": "Unmanned Aerial Vehicle,beluga,computer vision,deep | 93 | "keywords": "Unmanned Aerial Vehicle,beluga,computer vision,deep | ||
66 | learning,object detection,object tracking", | 94 | learning,object detection,object tracking", | ||
67 | "language": "English", | 95 | "language": "English", | ||
68 | "licenceType": "Open", | 96 | "licenceType": "Open", | ||
69 | "license_id": null, | 97 | "license_id": null, | ||
70 | "license_title": null, | 98 | "license_title": null, | ||
71 | "maintainer": null, | 99 | "maintainer": null, | ||
72 | "maintainer_email": null, | 100 | "maintainer_email": null, | ||
73 | "metadata_created": "2022-04-07T19:45:13.021227", | 101 | "metadata_created": "2022-04-07T19:45:13.021227", | ||
n | 74 | "metadata_modified": "2022-04-07T19:50:12.725300", | n | 102 | "metadata_modified": "2022-04-07T19:51:11.914138", |
75 | "name": | 103 | "name": | ||
76 | elugas-kayaks-and-motorized-boats-in-drone-video-using-deep-learning", | 104 | elugas-kayaks-and-motorized-boats-in-drone-video-using-deep-learning", | ||
77 | "notes": "Aerial imagery surveys are commonly used in marine mammal | 105 | "notes": "Aerial imagery surveys are commonly used in marine mammal | ||
78 | research to determine population size, distribution and habitat use. | 106 | research to determine population size, distribution and habitat use. | ||
79 | Analysis of aerial photos involves hours of manually identifying | 107 | Analysis of aerial photos involves hours of manually identifying | ||
80 | individuals present in each image and converting raw counts into | 108 | individuals present in each image and converting raw counts into | ||
81 | useable biological statistics. Our research proposes the use of deep | 109 | useable biological statistics. Our research proposes the use of deep | ||
82 | learning algorithms to increase the efficiency of the marine mammal | 110 | learning algorithms to increase the efficiency of the marine mammal | ||
83 | research workflow. To test the feasibility of this proposal, the | 111 | research workflow. To test the feasibility of this proposal, the | ||
84 | existing YOLOv4 convolutional neural network model was trained to | 112 | existing YOLOv4 convolutional neural network model was trained to | ||
85 | detect belugas, kayaks and motorized boats in oblique drone imagery, | 113 | detect belugas, kayaks and motorized boats in oblique drone imagery, | ||
86 | collected from a stationary tethered system. Automated computer-based | 114 | collected from a stationary tethered system. Automated computer-based | ||
87 | object detection achieved the following precision and recall, | 115 | object detection achieved the following precision and recall, | ||
88 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | 116 | respectively, for each class: beluga = 74%/72%; boat = 97%/99%; and | ||
89 | kayak = 96%/96%. We then tested the performance of computer vision | 117 | kayak = 96%/96%. We then tested the performance of computer vision | ||
90 | tracking of belugas and occupied watercraft in drone videos using the | 118 | tracking of belugas and occupied watercraft in drone videos using the | ||
91 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | 119 | DeepSORT tracking algorithm, which achieved a multiple-object tracking | ||
92 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | 120 | accuracy (MOTA) ranging from 37% to 88% and multiple object tracking | ||
93 | precision (MOTP) between 63% and 86%. Results from this research | 121 | precision (MOTP) between 63% and 86%. Results from this research | ||
94 | indicate that deep learning technology can detect and track features | 122 | indicate that deep learning technology can detect and track features | ||
95 | more consistently than human annotators, allowing for larger datasets | 123 | more consistently than human annotators, allowing for larger datasets | ||
96 | to be processed within a fraction of the time while avoiding | 124 | to be processed within a fraction of the time while avoiding | ||
97 | discrepancies introduced by labeling fatigue or multiple human | 125 | discrepancies introduced by labeling fatigue or multiple human | ||
98 | annotators.", | 126 | annotators.", | ||
99 | "num_resources": 1, | 127 | "num_resources": 1, | ||
100 | "num_tags": 6, | 128 | "num_tags": 6, | ||
101 | "organization": { | 129 | "organization": { | ||
102 | "approval_status": "approved", | 130 | "approval_status": "approved", | ||
103 | "created": "2017-07-21T13:15:49.935872", | 131 | "created": "2017-07-21T13:15:49.935872", | ||
104 | "description": "The Centre for Earth Observation Science (CEOS) | 132 | "description": "The Centre for Earth Observation Science (CEOS) | ||
105 | was established in 1994 with a mandate to research, preserve and | 133 | was established in 1994 with a mandate to research, preserve and | ||
106 | communicate knowledge of Earth system processes using the technologies | 134 | communicate knowledge of Earth system processes using the technologies | ||
107 | of Earth Observation Science. Research is multidisciplinary and | 135 | of Earth Observation Science. Research is multidisciplinary and | ||
108 | collaborative seeking to understand the complex interrelationships | 136 | collaborative seeking to understand the complex interrelationships | ||
109 | between elements of Earth systems, and how these systems will likely | 137 | between elements of Earth systems, and how these systems will likely | ||
110 | respond to climate change. Although researchers have worked in many | 138 | respond to climate change. Although researchers have worked in many | ||
111 | regions, the Arctic marine system has always been a unifying focus of | 139 | regions, the Arctic marine system has always been a unifying focus of | ||
112 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | 140 | activity.\r\n\r\nIn 2012, CEOS, along with the Greenland Climate | ||
113 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | 141 | Research Centre (GCRC, Nuuk, Greenland) and the Arctic Research Centre | ||
114 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | 142 | (ARC, Aarhus, Denmark) established the Arctic Science Partnership, | ||
115 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | 143 | thereby integrating academic and research initiatives.\r\n\r\nAreas of | ||
116 | existing research activity are divided among key themes:\r\n\r\nArctic | 144 | existing research activity are divided among key themes:\r\n\r\nArctic | ||
117 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | 145 | Anthropology/Paleoclimatology: LiDAR scanning and digital site | ||
118 | preservation, archaeo-geophysics, permafrost degredation, lithic | 146 | preservation, archaeo-geophysics, permafrost degredation, lithic | ||
119 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | 147 | morphometrics, zooarchaeology, proxy studies, paleodistribution of sea | ||
120 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | 148 | ice, landscape learning, Paleo-Eskimo culture, Thule Inuit culture, | ||
121 | ethnographic analogy, traditional knowledge, climate change and | 149 | ethnographic analogy, traditional knowledge, climate change and | ||
122 | northern heritage resource management.\r\n\r\nAtmospheric | 150 | northern heritage resource management.\r\n\r\nAtmospheric | ||
123 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | 151 | Studies/Meteorology: Boundary layer, precipitation, clouds, storms and | ||
124 | extreme weather, circulation, eddy correlations, polar vortex, | 152 | extreme weather, circulation, eddy correlations, polar vortex, | ||
125 | climate, teleconnections, geophysical fluid dynamics, flux and energy | 153 | climate, teleconnections, geophysical fluid dynamics, flux and energy | ||
126 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | 154 | budgets, ocean-sea ice-atmosphere interface, radiative transfer, ice | ||
127 | albedo feedback, cloud radiative forcing, pCO2. | 155 | albedo feedback, cloud radiative forcing, pCO2. | ||
128 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | 156 | \r\n\r\nBiogeochemistry: Organic carbon, greenhouse gases, bubbles, | ||
129 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | 157 | Ikaite, carbonate chemistry, CO2 fluxes, mercury and other trace | ||
130 | metals, minerals, hydrocarbons, brine processes, otolith | 158 | metals, minerals, hydrocarbons, brine processes, otolith | ||
131 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | 159 | microchemistry, sediments, biomarkers. \r\n\r\nContaminants: Mercury, | ||
132 | trace metals, PAHs, source, transport, transformation, pathways, | 160 | trace metals, PAHs, source, transport, transformation, pathways, | ||
133 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | 161 | bioaccumulations, marine ecosystems, marine chemistry. \r\nEarth | ||
134 | Observation Science: Active and passive microwave, LiDAR, EM | 162 | Observation Science: Active and passive microwave, LiDAR, EM | ||
135 | induction, spatial-temporal analysis, forward and inverse scattering | 163 | induction, spatial-temporal analysis, forward and inverse scattering | ||
136 | models, complex permittivity, ocean colour, ocean surface roughness, | 164 | models, complex permittivity, ocean colour, ocean surface roughness, | ||
137 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | 165 | NIR, TIR, satellite telemetry, GPS. Ice-Associated Biology: | ||
138 | Biophysical processes, primary production; ice algae, ice | 166 | Biophysical processes, primary production; ice algae, ice | ||
139 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | 167 | microbiology, bio-optics, under-ice phytoplankton. \r\n\r\nInland | ||
140 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | 168 | Lakes and Waters: Hydrologic connectivity, watershed systems, sediment | ||
141 | transport, nutrient transport, contaminants, landscape processes, | 169 | transport, nutrient transport, contaminants, landscape processes, | ||
142 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | 170 | remote sensing, freshwater-marine coupling. Marine Mammals: Seals, | ||
143 | whales, habitat, conservation, satellite telemetry, distribution, | 171 | whales, habitat, conservation, satellite telemetry, distribution, | ||
144 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | 172 | population studies, prey behaviour, bioacoustics.\r\n\r\nModelling: | ||
145 | Simulation of sea ice and oceanic regional processes, Nucleus for | 173 | Simulation of sea ice and oceanic regional processes, Nucleus for | ||
146 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | 174 | European Modelling of the Ocean (NEMO), ice-ocean modelling and | ||
147 | interactions, hind cast simulations and projections for sea ice state | 175 | interactions, hind cast simulations and projections for sea ice state | ||
148 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | 176 | and ocean variables based on CMIP5 scenarios and MIROC5 forcing, | ||
149 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | 177 | validation.\r\n\r\nOceanography: Circulation, temperature, in-flow and | ||
150 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | 178 | out-flow shelves, water dynamics, microturbulence, Beaufort Gyre, eddy | ||
151 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | 179 | correlations.\r\n\r\nSea Ice Geophysics:Thermodynamic and dynamic | ||
152 | processes, extreme ice features and hazards, snow, ridges, | 180 | processes, extreme ice features and hazards, snow, ridges, | ||
153 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | 181 | polynyas.\r\n\r\nTraditional and Local Knowledge: Indigenous cultures, | ||
154 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | 182 | Inuit, Inuvialuit, oral history, toponomy, mobility and settlement, | ||
155 | hunting, food security, sea ice use, community-based research, | 183 | hunting, food security, sea ice use, community-based research, | ||
156 | community-based monitoring, two ways of knowing.", | 184 | community-based monitoring, two ways of knowing.", | ||
157 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 185 | "id": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
158 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | 186 | "image_url": "2021-11-13-003953.952874UMLogoHORZ.jpg", | ||
159 | "is_organization": true, | 187 | "is_organization": true, | ||
160 | "name": "ceos2", | 188 | "name": "ceos2", | ||
161 | "state": "active", | 189 | "state": "active", | ||
162 | "title": "CEOS", | 190 | "title": "CEOS", | ||
163 | "type": "organization" | 191 | "type": "organization" | ||
164 | }, | 192 | }, | ||
165 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | 193 | "owner_org": "9e21f6b6-d13f-4ba2-a379-fd962f507071", | ||
166 | "private": false, | 194 | "private": false, | ||
167 | "relatedResources": [ | 195 | "relatedResources": [ | ||
168 | { | 196 | { | ||
169 | "RelatedIdentifier": "", | 197 | "RelatedIdentifier": "", | ||
170 | "ResourceTypeGeneral": "", | 198 | "ResourceTypeGeneral": "", | ||
171 | "name": "", | 199 | "name": "", | ||
172 | "relatedIdentifierType": "", | 200 | "relatedIdentifierType": "", | ||
173 | "relationType": "", | 201 | "relationType": "", | ||
174 | "resourceType": "Online Resource", | 202 | "resourceType": "Online Resource", | ||
175 | "seriesName": "" | 203 | "seriesName": "" | ||
176 | } | 204 | } | ||
177 | ], | 205 | ], | ||
t | 178 | "related_datasets": "[]", | t | 206 | "related_datasets": "[\"ff83795d-4140-4b01-aba8-b83c66c70c9a\", |
207 | \"b5f259b4-3ace-4750-bfb0-47c4e794082f\"]", | ||||
179 | "related_programs": "[]", | 208 | "related_programs": "[]", | ||
180 | "relationships_as_object": [], | 209 | "relationships_as_object": [], | ||
181 | "relationships_as_subject": [], | 210 | "relationships_as_subject": [], | ||
182 | "resources": [ | 211 | "resources": [ | ||
183 | { | 212 | { | ||
184 | "cache_last_updated": null, | 213 | "cache_last_updated": null, | ||
185 | "cache_url": null, | 214 | "cache_url": null, | ||
186 | "created": "2022-04-07T19:49:13.974750", | 215 | "created": "2022-04-07T19:49:13.974750", | ||
187 | "datastore_active": false, | 216 | "datastore_active": false, | ||
188 | "datastore_contains_all_records_of_source_file": false, | 217 | "datastore_contains_all_records_of_source_file": false, | ||
189 | "description": "Churchill Beluga Boat Drone Imagery related | 218 | "description": "Churchill Beluga Boat Drone Imagery related | ||
190 | journal article published in Drone Systems and Applications.", | 219 | journal article published in Drone Systems and Applications.", | ||
191 | "format": "", | 220 | "format": "", | ||
192 | "hash": "", | 221 | "hash": "", | ||
193 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | 222 | "id": "5bcbb0bc-425b-4fad-b7ff-4c8599043dcf", | ||
194 | "last_modified": null, | 223 | "last_modified": null, | ||
195 | "metadata_modified": "2022-04-07T19:49:14.249368", | 224 | "metadata_modified": "2022-04-07T19:49:14.249368", | ||
196 | "mimetype": null, | 225 | "mimetype": null, | ||
197 | "mimetype_inner": null, | 226 | "mimetype_inner": null, | ||
198 | "name": "Detection and tracking of belugas, kayaks and motorized | 227 | "name": "Detection and tracking of belugas, kayaks and motorized | ||
199 | boats in drone video using deep learning", | 228 | boats in drone video using deep learning", | ||
200 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | 229 | "package_id": "54b0d7a1-8536-4d40-b1bb-daad81805f43", | ||
201 | "position": 0, | 230 | "position": 0, | ||
202 | "resCategory": "supplemental", | 231 | "resCategory": "supplemental", | ||
203 | "resource_type": null, | 232 | "resource_type": null, | ||
204 | "size": null, | 233 | "size": null, | ||
205 | "state": "active", | 234 | "state": "active", | ||
206 | "url": "https://doi.org/10.1139/juvs-2021-0024", | 235 | "url": "https://doi.org/10.1139/juvs-2021-0024", | ||
207 | "url_type": null | 236 | "url_type": null | ||
208 | } | 237 | } | ||
209 | ], | 238 | ], | ||
210 | "rightsIdentifier": "CC-BY-4.0", | 239 | "rightsIdentifier": "CC-BY-4.0", | ||
211 | "rightsIdentifierScheme": "SPDX", | 240 | "rightsIdentifierScheme": "SPDX", | ||
212 | "rightsSchemeURI": "https://spdx.org/licenses", | 241 | "rightsSchemeURI": "https://spdx.org/licenses", | ||
213 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | 242 | "rightsURI": "https://spdx.org/licenses/CC-BY-4.0.html", | ||
214 | "schemeURI": "", | 243 | "schemeURI": "", | ||
215 | "state": "active", | 244 | "state": "active", | ||
216 | "subjectScheme": "", | 245 | "subjectScheme": "", | ||
217 | "tags": [ | 246 | "tags": [ | ||
218 | { | 247 | { | ||
219 | "display_name": "Unmanned Aerial Vehicle", | 248 | "display_name": "Unmanned Aerial Vehicle", | ||
220 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | 249 | "id": "a6dc9001-e6da-4a84-bfec-2941d3ebce78", | ||
221 | "name": "Unmanned Aerial Vehicle", | 250 | "name": "Unmanned Aerial Vehicle", | ||
222 | "state": "active", | 251 | "state": "active", | ||
223 | "vocabulary_id": null | 252 | "vocabulary_id": null | ||
224 | }, | 253 | }, | ||
225 | { | 254 | { | ||
226 | "display_name": "beluga", | 255 | "display_name": "beluga", | ||
227 | "id": "286b2f82-a071-41cc-bc3b-7ea986233649", | 256 | "id": "286b2f82-a071-41cc-bc3b-7ea986233649", | ||
228 | "name": "beluga", | 257 | "name": "beluga", | ||
229 | "state": "active", | 258 | "state": "active", | ||
230 | "vocabulary_id": null | 259 | "vocabulary_id": null | ||
231 | }, | 260 | }, | ||
232 | { | 261 | { | ||
233 | "display_name": "computer vision", | 262 | "display_name": "computer vision", | ||
234 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | 263 | "id": "d7270905-c420-4d19-aa9c-c6f818ab5b67", | ||
235 | "name": "computer vision", | 264 | "name": "computer vision", | ||
236 | "state": "active", | 265 | "state": "active", | ||
237 | "vocabulary_id": null | 266 | "vocabulary_id": null | ||
238 | }, | 267 | }, | ||
239 | { | 268 | { | ||
240 | "display_name": "deep learning", | 269 | "display_name": "deep learning", | ||
241 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | 270 | "id": "87526358-2d8a-4c78-8375-38c132b53d5a", | ||
242 | "name": "deep learning", | 271 | "name": "deep learning", | ||
243 | "state": "active", | 272 | "state": "active", | ||
244 | "vocabulary_id": null | 273 | "vocabulary_id": null | ||
245 | }, | 274 | }, | ||
246 | { | 275 | { | ||
247 | "display_name": "object detection", | 276 | "display_name": "object detection", | ||
248 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | 277 | "id": "a3d44586-cba5-4685-b07a-2d2f16578353", | ||
249 | "name": "object detection", | 278 | "name": "object detection", | ||
250 | "state": "active", | 279 | "state": "active", | ||
251 | "vocabulary_id": null | 280 | "vocabulary_id": null | ||
252 | }, | 281 | }, | ||
253 | { | 282 | { | ||
254 | "display_name": "object tracking", | 283 | "display_name": "object tracking", | ||
255 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | 284 | "id": "28ce0864-2ed8-43d1-b80d-d79684cac63f", | ||
256 | "name": "object tracking", | 285 | "name": "object tracking", | ||
257 | "state": "active", | 286 | "state": "active", | ||
258 | "vocabulary_id": null | 287 | "vocabulary_id": null | ||
259 | } | 288 | } | ||
260 | ], | 289 | ], | ||
261 | "theme": [ | 290 | "theme": [ | ||
262 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | 291 | "8f8cd877-b037-4b1a-b928-f86d9e093741", | ||
263 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | 292 | "98238b1c-5be8-41ad-8c6e-74cdc4f5f369", | ||
264 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | 293 | "3ec49cbb-4da6-4fe8-8d54-5b6ce03b49d9" | ||
265 | ], | 294 | ], | ||
266 | "title": "Detection and tracking of belugas, kayaks and motorized | 295 | "title": "Detection and tracking of belugas, kayaks and motorized | ||
267 | boats in drone video using deep learning", | 296 | boats in drone video using deep learning", | ||
268 | "type": "publication", | 297 | "type": "publication", | ||
269 | "url": null, | 298 | "url": null, | ||
270 | "version": null | 299 | "version": null | ||
271 | } | 300 | } |