@@ -2,17 +2,13 @@
 import urllib.request
 import numpy as np
 import copy
+from deeplake.integrations.labelbox.deeplake_utils import *


 def bbox_converter_(obj, converter, tensor_name, context, generate_labels):
     ds = context["ds"]
     try:
-        ds.create_tensor(
-            tensor_name,
-            htype="bbox",
-            dtype="int32",
-            coords={"type": "pixel", "mode": "LTWH"},
-        )
+        ds.create_tensor(tensor_name, **bbox_tensor_create_kwargs_())
     except:
         pass

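The hunk above swaps the inline `create_tensor` arguments for small helper functions pulled in through the wildcard import from `deeplake.integrations.labelbox.deeplake_utils`. The helper bodies are not part of this diff; as a rough sketch, inferred only from the kwargs the removed code used to pass, `bbox_tensor_create_kwargs_()` presumably returns something like:

```python
# Hypothetical sketch: the real helper lives in deeplake_utils.py and is not
# shown in this diff; its actual return value may differ (for example if it
# targets a newer Deep Lake tensor API).
def bbox_tensor_create_kwargs_():
    return {
        "htype": "bbox",
        "dtype": "int32",
        "coords": {"type": "pixel", "mode": "LTWH"},
    }
```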
@@ -73,9 +69,10 @@ def radio_converter_(obj, converter, tensor_name, context, generate_labels):
     try:
         ds.create_tensor(
             tensor_name,
-            htype="class_label",
-            class_names=list(converter.label_mappings[tensor_name].keys()),
-            chunk_compression="lz4",
+            **class_label_tensor_create_kwargs_(),
+        )
+        ds[tensor_name].update_metadata(
+            {"class_names": list(converter.label_mappings[tensor_name].keys())}
         )
     except:
         pass
@@ -115,9 +112,10 @@ def checkbox_converter_(obj, converter, tensor_name, context, generate_labels):
     try:
         ds.create_tensor(
             tensor_name,
-            htype="class_label",
-            class_names=list(converter.label_mappings[tensor_name].keys()),
-            chunk_compression="lz4",
+            **class_label_tensor_create_kwargs_(),
+        )
+        ds[tensor_name].update_metadata(
+            {"class_names": list(converter.label_mappings[tensor_name].keys())}
         )
     except:
         pass
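Both the radio and checkbox hunks follow the same pattern: the tensor is created from `class_label_tensor_create_kwargs_()`, and the class names are attached afterwards via `update_metadata` instead of being passed as `class_names=` at creation time. A plausible sketch of that helper, inferred from the removed kwargs (the actual implementation in `deeplake_utils.py` may differ):

```python
# Assumed shape of the helper, reconstructed from the kwargs the old code
# passed; not taken from the actual deeplake_utils.py source.
def class_label_tensor_create_kwargs_():
    return {
        "htype": "class_label",
        "dtype": "int32",
        "chunk_compression": "lz4",
    }
```

Splitting the call this way decouples tensor creation from the class-name metadata, which the converters keep updating as new labels appear.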
@@ -149,7 +147,7 @@ def checkbox_converter_nested(row, obj):
 def point_converter_(obj, converter, tensor_name, context, generate_labels):
     ds = context["ds"]
     try:
-        ds.create_tensor(tensor_name, htype="point", dtype="int32")
+        ds.create_tensor(tensor_name, **point_tensor_create_kwargs_())
     except:
         pass

@@ -187,7 +185,7 @@ def interpolator(start, end, progress):
 def line_converter_(obj, converter, tensor_name, context, generate_labels):
     ds = context["ds"]
     try:
-        ds.create_tensor(tensor_name, htype="polygon", dtype="int32")
+        ds.create_tensor(tensor_name, **polygon_tensor_create_kwargs_())
     except:
         pass

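The point and polyline converters get the same one-line treatment. Hedged sketches of the two helpers, again inferred from the removed `htype`/`dtype` arguments rather than from the actual `deeplake_utils.py` source:

```python
# Assumed return values, based only on the kwargs removed in the hunks above.
def point_tensor_create_kwargs_():
    return {"htype": "point", "dtype": "int32"}


def polygon_tensor_create_kwargs_():
    return {"htype": "polygon", "dtype": "int32"}
```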
@@ -230,20 +228,14 @@ def raster_segmentation_converter_(
 ):
     ds = context["ds"]
     try:
-        ds.create_tensor(
-            tensor_name, htype="binary_mask", dtype="bool", sample_compression="lz4"
-        )
+        ds.create_tensor(tensor_name, **binary_mask_tensor_create_kwargs_())
     except:
         pass

     try:
         if generate_labels:
             ds.create_tensor(
-                f"{tensor_name}_labels",
-                htype="class_label",
-                dtype="int32",
-                class_names=[],
-                chunk_compression="lz4",
+                f"{tensor_name}_labels", **class_label_tensor_create_kwargs_()
             )
             converter.label_mappings[f"{tensor_name}_labels"] = dict()
     except:
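In the raster segmentation converter, the mask tensor and its companion `_labels` tensor are likewise created from helpers. Note that `class_names=[]` is no longer passed for the labels tensor, consistent with class names now being maintained through `update_metadata`. A hedged sketch of the mask helper, inferred from the removed call:

```python
# Assumed sketch inferred from the removed create_tensor() call (boolean masks
# with LZ4 sample compression); the real deeplake_utils.py code may differ.
def binary_mask_tensor_create_kwargs_():
    return {
        "htype": "binary_mask",
        "dtype": "bool",
        "sample_compression": "lz4",
    }
```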
@@ -270,30 +262,33 @@ def mask_converter(row, obj):
                         converter.label_mappings[f"{tensor_name}_labels"][tool_name] = (
                             len(converter.label_mappings[f"{tensor_name}_labels"])
                         )
-                        ds[f"{tensor_name}_labels"].info.update(
-                            class_names=list(
-                                converter.label_mappings[f"{tensor_name}_labels"].keys()
-                            )
+                        ds[f"{tensor_name}_labels"].update_metadata(
+                            {
+                                "class_names": list(
+                                    converter.label_mappings[
+                                        f"{tensor_name}_labels"
+                                    ].keys()
+                                )
+                            }
                         )
                     val = []
                     try:
-                        val = (
-                            ds[f"{tensor_name}_labels"][row].numpy(aslist=True).tolist()
-                        )
+                        val = ds[f"{tensor_name}_labels"].value(row, aslist=True)
                     except (KeyError, IndexError):
                         pass
-
                     val.append(
                         converter.label_mappings[f"{tensor_name}_labels"][tool_name]
                     )
-                    ds[f"{tensor_name}_labels"][row] = val
+                    ds[f"{tensor_name}_labels"].set_value(row, val)

                 mask = np.array(Image.open(response)).astype(np.bool_)
                 mask = mask[..., np.newaxis]
                 try:
                     if generate_labels:
-                        val = ds[tensor_name][row].numpy()
+                        val = ds[tensor_name].value(row)
                         labels = ds[f"{tensor_name}_labels"].info["class_names"]
+                        if val is None:
+                            raise IndexError()
                         if len(labels) != val.shape[-1]:
                             val = np.concatenate(
                                 [val, np.zeros_like(mask)],
@@ -302,11 +297,11 @@ def mask_converter(row, obj):
                         idx = labels.index(tool_name)
                         val[:, :, idx] = np.logical_or(val[:, :, idx], mask[:, :, 0])
                     else:
-                        val = np.logical_or(ds[tensor_name][row].numpy(), mask)
+                        val = np.logical_or(ds[tensor_name].value(row), mask)
                 except (KeyError, IndexError):
                     val = mask

-                ds[tensor_name][row] = val
+                ds[tensor_name].set_value(row, val)

             except Exception as e:
                 print(f"Error downloading mask: {e}")
@@ -316,7 +311,7 @@ def mask_converter(row, obj):
 def text_converter_(obj, converter, tensor_name, context, generate_labels):
     ds = context["ds"]
     try:
-        ds.create_tensor(tensor_name, htype="text", dtype="str")
+        ds.create_tensor(tensor_name, **text_tensor_create_kwargs_())
     except:
         pass

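The text converter is the last to follow the pattern; its helper presumably reduces to the two removed kwargs:

```python
# Hypothetical sketch inferred from the removed kwargs; not shown in the diff.
def text_tensor_create_kwargs_():
    return {"htype": "text", "dtype": "str"}
```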