@@ -646,23 +646,26 @@ def test_group_without_user_agent(self):
 )
 class BaseLocalNetworkTestCase:
 
-    @classmethod
-    def setUpClass(cls):
+    def setUp(self):
         # clear _opener global variable
-        cls.addClassCleanup(urllib.request.urlcleanup)
+        self.addCleanup(urllib.request.urlcleanup)
 
-        cls.server = HTTPServer((socket_helper.HOST, 0), cls.RobotHandler)
-        cls.addClassCleanup(cls.server.server_close)
+        self.server = HTTPServer((socket_helper.HOST, 0), self.RobotHandler)
 
-        t = threading.Thread(
+        self.t = threading.Thread(
             name='HTTPServer serving',
-            target=cls.server.serve_forever,
+            target=self.server.serve_forever,
             # Short poll interval to make the test finish quickly.
             # Time between requests is short enough that we won't wake
             # up spuriously too many times.
             kwargs={'poll_interval':0.01})
-        cls.enterClassContext(threading_helper.start_threads([t]))
-        cls.addClassCleanup(cls.server.shutdown)
+        self.t.daemon = True  # In case this function raises.
+        self.t.start()
+
+    def tearDown(self):
+        self.server.shutdown()
+        self.t.join()
+        self.server.server_close()
 
 
 SAMPLE_ROBOTS_TXT = b'''\
@@ -684,6 +687,7 @@ def do_GET(self):
         def log_message(self, format, *args):
             pass
 
+    @threading_helper.reap_threads
     def testRead(self):
         # Test that reading a weird robots.txt doesn't fail.
         addr = self.server.server_address
@@ -705,62 +709,24 @@ def testRead(self):
         self.assertFalse(parser.can_fetch(agent, url + '/%2F[spam]/path'))
 
 
-class HttpErrorsTestCase(BaseLocalNetworkTestCase, unittest.TestCase):
+class PasswordProtectedSiteTestCase(BaseLocalNetworkTestCase, unittest.TestCase):
     class RobotHandler(BaseHTTPRequestHandler):
 
         def do_GET(self):
-            self.send_error(self.server.return_code)
+            self.send_error(403, "Forbidden access")
 
         def log_message(self, format, *args):
             pass
 
-    def setUp(self):
-        # Make sure that a valid code is set in the test.
-        self.server.return_code = None
-
+    @threading_helper.reap_threads
     def testPasswordProtectedSite(self):
-        self.server.return_code = 403
         addr = self.server.server_address
         url = 'http://' + socket_helper.HOST + ':' + str(addr[1])
         robots_url = url + "/robots.txt"
         parser = urllib.robotparser.RobotFileParser()
         parser.set_url(url)
         parser.read()
         self.assertFalse(parser.can_fetch("*", robots_url))
-        self.assertFalse(parser.can_fetch("*", url + '/some/file.html'))
-
-    def testNotFound(self):
-        self.server.return_code = 404
-        addr = self.server.server_address
-        url = f'http://{socket_helper.HOST}:{addr[1]}'
-        robots_url = url + "/robots.txt"
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        parser.read()
-        self.assertTrue(parser.can_fetch("*", robots_url))
-        self.assertTrue(parser.can_fetch("*", url + '/path/file.html'))
-
-    def testTeapot(self):
-        self.server.return_code = 418
-        addr = self.server.server_address
-        url = f'http://{socket_helper.HOST}:{addr[1]}'
-        robots_url = url + "/robots.txt"
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        parser.read()
-        self.assertTrue(parser.can_fetch("*", robots_url))
-        self.assertTrue(parser.can_fetch("*", url + '/pot-1?milk-type=Cream'))
-
-    def testServiceUnavailable(self):
-        self.server.return_code = 503
-        addr = self.server.server_address
-        url = f'http://{socket_helper.HOST}:{addr[1]}'
-        robots_url = url + "/robots.txt"
-        parser = urllib.robotparser.RobotFileParser()
-        parser.set_url(url)
-        parser.read()
-        self.assertFalse(parser.can_fetch("*", robots_url))
-        self.assertFalse(parser.can_fetch("*", url + '/path/file.html'))
 
 
 @support.requires_working_socket()
0 commit comments