CLN GH22874 replace bare excepts in pandas/io/pytables.py (#22919) · pandas-dev/pandas@8e749a3
@@ -258,7 +258,7 @@ def _tables():
         try:
             _table_file_open_policy_is_strict = (
                 tables.file._FILE_OPEN_POLICY == 'strict')
-        except:
+        except AttributeError:
             pass
 
     return _table_mod
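Catching AttributeError here matches the actual failure mode: on older PyTables builds `tables.file` simply has no `_FILE_OPEN_POLICY` attribute, so the lookup itself raises AttributeError, while any other exception now propagates. The snippet below is a minimal sketch of that probe-an-optional-attribute pattern using a stand-in module object; the class name is invented for illustration and is not part of pandas or PyTables.

```python
class _OldPyTablesFileModule:
    """Stand-in for tables.file on an old PyTables build (no policy attribute)."""


def _is_strict_policy(file_module):
    # Probe the optional attribute; only its absence is silenced.
    try:
        return file_module._FILE_OPEN_POLICY == 'strict'
    except AttributeError:
        return False


print(_is_strict_policy(_OldPyTablesFileModule()))  # False, nothing else is hidden
```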
@@ -395,11 +395,11 @@ def read_hdf(path_or_buf, key=None, mode='r', **kwargs):
                                      'contains multiple datasets.')
             key = candidate_only_group._v_pathname
         return store.select(key, auto_close=auto_close, **kwargs)
-    except:
+    except (ValueError, TypeError):
         # if there is an error, close the store
         try:
             store.close()
-        except:
+        except AttributeError:
             pass
 
         raise
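The outer handler re-raises after a best-effort cleanup: only ValueError/TypeError from the selection trigger the close, and a failure while closing is ignored so it cannot mask the original error. Below is a toy sketch of that cleanup-then-reraise shape; ToyStore and its methods are invented for illustration and are not the pandas HDFStore API.

```python
class ToyStore:
    def __init__(self):
        self.closed = False

    def select(self, key):
        # Always fails, to exercise the error path.
        raise ValueError("bad key: %r" % key)

    def close(self):
        self.closed = True


def read(store, key):
    try:
        return store.select(key)
    except (ValueError, TypeError):
        # Best-effort cleanup, then re-raise the original error.
        try:
            store.close()
        except AttributeError:
            pass
        raise


store = ToyStore()
try:
    read(store, 'missing')
except ValueError:
    print(store.closed)  # True: the store was closed before re-raising
```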
@@ -517,7 +517,7 @@ def __getattr__(self, name):
         """ allow attribute access to get stores """
         try:
             return self.get(name)
-        except:
+        except (KeyError, ClosedFileError):
             pass
         raise AttributeError("'%s' object has no attribute '%s'" %
                              (type(self).__name__, name))
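Because Python's attribute protocol expects `__getattr__` to signal a missing attribute with AttributeError, the handler swallows only the two failures that mean "no such store" (a missing key, or a ClosedFileError from an already-closed file) and converts them; anything else now surfaces. A sketch of the same delegation idea with a toy store; ToyStore and ClosedStoreError are made up for illustration.

```python
class ClosedStoreError(Exception):
    pass


class ToyStore:
    def __init__(self, data=None, closed=False):
        self._data = data or {}
        self._closed = closed

    def get(self, key):
        if self._closed:
            raise ClosedStoreError("store is closed")
        return self._data[key]  # may raise KeyError

    def __getattr__(self, name):
        # Only "no such store" failures are translated to AttributeError.
        try:
            return self.get(name)
        except (KeyError, ClosedStoreError):
            pass
        raise AttributeError("'%s' object has no attribute '%s'" %
                             (type(self).__name__, name))


store = ToyStore({'df': 42})
print(store.df)             # 42, via __getattr__ -> get
print(hasattr(store, 'x'))  # False: KeyError became AttributeError
```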
@@ -675,7 +675,7 @@ def flush(self, fsync=False):
             if fsync:
                 try:
                     os.fsync(self._handle.fileno())
-                except:
+                except OSError:
                     pass
 
     def get(self, key):
@@ -1161,7 +1161,7 @@ def get_node(self, key):
             if not key.startswith('/'):
                 key = '/' + key
             return self._handle.get_node(self.root, key)
-        except:
+        except _table_mod.exceptions.NoSuchNodeError:
             return None
 
     def get_storer(self, key):
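`_table_mod` is the imported `tables` module, so the handler now names PyTables' own NoSuchNodeError instead of hiding every failure behind a bare except. A small sketch of the same lookup-or-None idiom against PyTables directly; it assumes the `tables` package is installed, and the file and group names are made up.

```python
import tables


def get_node_or_none(handle, key):
    """Return the node stored at `key`, or None if it does not exist."""
    try:
        if not key.startswith('/'):
            key = '/' + key
        return handle.get_node(key)
    except tables.exceptions.NoSuchNodeError:
        return None


with tables.open_file('demo.h5', mode='w') as handle:
    handle.create_group('/', 'present')
    assert get_node_or_none(handle, 'present') is not None
    assert get_node_or_none(handle, 'missing') is None
```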
@@ -1270,7 +1270,7 @@ def _validate_format(self, format, kwargs):
         # validate
         try:
             kwargs['format'] = _FORMAT_MAP[format.lower()]
-        except:
+        except KeyError:
             raise TypeError("invalid HDFStore format specified [{0}]"
                             .format(format))
 
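The same lookup-translation pattern recurs throughout the file (`_FORMAT_MAP`, `_TYPE_MAP`, `_STORER_MAP`, `_TABLE_MAP`, `_AXES_MAP`): the dictionary lookup can only fail with KeyError, which is then converted into a user-facing error. A toy version of the idiom, with a hypothetical mapping rather than the real `_FORMAT_MAP`:

```python
# Hypothetical mapping for illustration; not the pandas _FORMAT_MAP.
_FORMATS = {'f': 'fixed', 'fixed': 'fixed', 't': 'table', 'table': 'table'}


def validate_format(fmt):
    try:
        return _FORMATS[fmt.lower()]
    except KeyError:
        # only a missing key becomes the user-facing TypeError
        raise TypeError("invalid HDFStore format specified [{0}]".format(fmt))


print(validate_format('T'))   # 'table'
# validate_format('csv')      # would raise TypeError
```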
@@ -1307,7 +1307,7 @@ def error(t):
 
                 try:
                     pt = _TYPE_MAP[type(value)]
-                except:
+                except KeyError:
                     error('_TYPE_MAP')
 
                 # we are actually a table
@@ -1318,7 +1318,7 @@ def error(t):
         if u('table') not in pt:
             try:
                 return globals()[_STORER_MAP[pt]](self, group, **kwargs)
-            except:
+            except KeyError:
                 error('_STORER_MAP')
 
         # existing node (and must be a table)
@@ -1354,12 +1354,12 @@ def error(t):
                     fields = group.table._v_attrs.fields
                     if len(fields) == 1 and fields[0] == u('value'):
                         tt = u('legacy_frame')
-                except:
+                except IndexError:
                     pass
 
         try:
             return globals()[_TABLE_MAP[tt]](self, group, **kwargs)
-        except:
+        except KeyError:
             error('_TABLE_MAP')
 
     def _write_to_group(self, key, value, format, index=True, append=False,
@@ -1624,7 +1624,7 @@ def is_indexed(self):
         """ return whether I am an indexed column """
         try:
             return getattr(self.table.cols, self.cname).is_indexed
-        except:
+        except AttributeError:
             False
 
     def copy(self):
@@ -1654,9 +1654,10 @@ def convert(self, values, nan_rep, encoding, errors):
             kwargs['freq'] = _ensure_decoded(self.freq)
         if self.index_name is not None:
             kwargs['name'] = _ensure_decoded(self.index_name)
+        # making an Index instance could throw a number of different errors
         try:
             self.values = Index(values, **kwargs)
-        except:
+        except Exception:  # noqa: E722
 
             # if the output freq is different that what we recorded,
             # it should be None (see also 'doc example part 2')
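This is the one spot that deliberately stays broad: constructing an Index from stored values can fail in several ways (for example a recorded freq that no longer matches the data), and the except block retries with the offending keyword cleared, so `except Exception` plus the noqa marker documents the intent instead of a silent bare except. Below is a generic, hedged sketch of that try-then-retry-without-kwarg shape; the factory and class names are invented and do not come from pandas.

```python
def build_index(factory, values, **kwargs):
    try:
        return factory(values, **kwargs)
    except Exception:
        # the recorded freq may no longer apply to the data on disk
        if 'freq' in kwargs:
            kwargs['freq'] = None
        return factory(values, **kwargs)


class StrictRange:
    """Toy index-like object that only accepts freq='D' (or no freq)."""

    def __init__(self, values, freq=None):
        if freq is not None and freq != 'D':
            raise ValueError("unsupported freq: %r" % freq)
        self.values, self.freq = list(values), freq


idx = build_index(StrictRange, [1, 2, 3], freq='H')
print(idx.freq)  # None: the fallback dropped the incompatible freq
```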
@@ -1869,7 +1870,7 @@ def create_for_block(
                 m = re.search(r"values_block_(\d+)", name)
                 if m:
                     name = "values_%s" % m.groups()[0]
-        except:
+        except IndexError:
             pass
 
         return cls(name=name, cname=cname, **kwargs)
@@ -2232,7 +2233,7 @@ def convert(self, values, nan_rep, encoding, errors):
 
                 try:
                     self.data = self.data.astype(dtype, copy=False)
-                except:
+                except TypeError:
                     self.data = self.data.astype('O', copy=False)
 
         # convert nans / decode
@@ -2325,7 +2326,7 @@ def set_version(self):
             self.version = tuple(int(x) for x in version.split('.'))
             if len(self.version) == 2:
                 self.version = self.version + (0,)
-        except:
+        except AttributeError:
             self.version = (0, 0, 0)
 
     @property
@@ -2769,7 +2770,7 @@ def write_array(self, key, value, items=None):
             else:
                 try:
                     items = list(items)
-                except:
+                except TypeError:
                     pass
                 ws = performance_doc % (inferred_type, key, items)
                 warnings.warn(ws, PerformanceWarning, stacklevel=7)
@@ -2843,7 +2844,7 @@ class SeriesFixed(GenericFixed):
     def shape(self):
         try:
             return len(getattr(self.group, 'values')),
-        except:
+        except (TypeError, AttributeError):
             return None
 
     def read(self, **kwargs):
@@ -2961,7 +2962,7 @@ def shape(self):
                 shape = shape[::-1]
 
             return shape
-        except:
+        except AttributeError:
             return None
 
     def read(self, start=None, stop=None, **kwargs):
@@ -3495,7 +3496,7 @@ def create_axes(self, axes, obj, validate=True, nan_rep=None,
         if axes is None:
             try:
                 axes = _AXES_MAP[type(obj)]
-            except:
+            except KeyError:
                 raise TypeError("cannot properly create the storer for: "
                                 "[group->%s,value->%s]"
                                 % (self.group._v_name, type(obj)))
@@ -3614,7 +3615,7 @@ def get_blk_items(mgr, blocks):
                     b, b_items = by_items.pop(items)
                     new_blocks.append(b)
                     new_blk_items.append(b_items)
-                except:
+                except (IndexError, KeyError):
                     raise ValueError(
                         "cannot match existing table structure for [%s] on "
                         "appending data" % ','.join(pprint_thing(item) for
@@ -3642,7 +3643,7 @@ def get_blk_items(mgr, blocks):
             if existing_table is not None and validate:
                 try:
                     existing_col = existing_table.values_axes[i]
-                except:
+                except (IndexError, KeyError):
                     raise ValueError("Incompatible appended table [%s] with "
                                      "existing table [%s]"
                                      % (blocks, existing_table.values_axes))
@@ -4460,7 +4461,7 @@ def _get_info(info, name):
     """ get/create the info for this name """
     try:
         idx = info[name]
-    except:
+    except KeyError:
         idx = info[name] = dict()
     return idx
 
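Here `info[name]` on a dict can only fail with KeyError when the entry is absent, so the narrowed handler makes the get-or-create intent explicit while real bugs propagate. A small sketch of the idiom; `dict.setdefault` would be an equivalent one-liner and is shown only for comparison.

```python
def get_info(info, name):
    """Return info[name], creating an empty dict entry on first use."""
    try:
        idx = info[name]
    except KeyError:
        idx = info[name] = dict()
    return idx


info = {}
get_info(info, 'index')['freq'] = 'D'
assert info == {'index': {'freq': 'D'}}
# Equivalent spelling with setdefault:
assert info.setdefault('index', {}) is info['index']
```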
@@ -4782,7 +4783,7 @@ def __init__(self, table, where=None, start=None, stop=None, **kwargs):
                             )
                         self.coordinates = where
 
-            except:
+            except ValueError:
                 pass
 
         if self.coordinates is None: