Drop the ResourceWarning from _TemporaryFileCloser.__del__, add a
__del__ to _TemporaryFileWrapper that delegates cleanup to its closer,
and make test_robotparser close its RobotFileParser in tearDown.

diff --git a/Lib/tempfile.py b/Lib/tempfile.py
index 0eb9ddeb6ac377..93399c79820f32 100644
--- a/Lib/tempfile.py
+++ b/Lib/tempfile.py
@@ -477,10 +477,7 @@ def close(self):
         self.cleanup()
 
     def __del__(self):
-        close_called = self.close_called
         self.cleanup()
-        if not close_called:
-            _warnings.warn(self.warn_message, ResourceWarning)
 
 
 class _TemporaryFileWrapper:
@@ -554,6 +551,10 @@ def __iter__(self):
         for line in self.file:
             yield line
 
+    def __del__(self):
+        self._closer.cleanup()
+
+
 def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None, newline=None,
                        suffix=None, prefix=None, dir=None, delete=True,
                        *, errors=None,
diff --git a/Lib/test/test_robotparser.py b/Lib/test/test_robotparser.py
index 8d89e2a8224452..694afa67f238cf 100644
--- a/Lib/test/test_robotparser.py
+++ b/Lib/test/test_robotparser.py
@@ -334,16 +334,17 @@ def tearDown(self):
         self.server.shutdown()
         self.t.join()
         self.server.server_close()
+        self.parser.close()  # P9de7
 
     @threading_helper.reap_threads
     def testPasswordProtectedSite(self):
         addr = self.server.server_address
         url = 'http://' + socket_helper.HOST + ':' + str(addr[1])
         robots_url = url + "/robots.txt"
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        parser.read()
-        self.assertFalse(parser.can_fetch("*", robots_url))
+        self.parser = urllib.robotparser.RobotFileParser()  # P9de7
+        self.parser.set_url(url)
+        self.parser.read()
+        self.assertFalse(self.parser.can_fetch("*", robots_url))
 
 
 @support.requires_working_socket()
@@ -364,6 +365,9 @@ def url(self, path):
             self.base_url, path, '/' if not os.path.splitext(path)[1] else ''
         )
 
+    def tearDown(self):
+        self.parser.close()  # P080b
+
     def test_basic(self):
         self.assertFalse(self.parser.disallow_all)
         self.assertFalse(self.parser.allow_all)