Checking if a website is up via Python

Anthony Forloney

You could try to do this with getcode() from urllib:

>>> import urllib
>>> print urllib.urlopen("http://www.stackoverflow.com").getcode()
200

EDIT: For more modern Python, i.e. Python 3, use:

import urllib.request
print(urllib.request.urlopen("http://www.stackoverflow.com").getcode())
# 200
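
One caveat: in Python 3, urlopen raises HTTPError for 4xx/5xx responses instead of returning a response object, so a helper that reports the status code needs a try/except (a minimal sketch):

import urllib.request
import urllib.error

def get_status(url):
    try:
        return urllib.request.urlopen(url).getcode()
    except urllib.error.HTTPError as e:
        # urlopen raises for 4xx/5xx; the status code is on the exception
        return e.code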

I think the easiest way to do it is by using the Requests module:

import requests

def url_ok(url):
    r = requests.head(url)
    return r.status_code == 200
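
Note that requests.head can block indefinitely if the server never responds, and by default it does not follow redirects, so a 301 to a live page would not count as "up". A variant with a timeout and redirect handling (the 5-second timeout is an arbitrary choice):

import requests

def url_ok(url, timeout=5):
    try:
        # follow redirects so a 301/302 to a live page still counts as up
        r = requests.head(url, timeout=timeout, allow_redirects=True)
        return r.status_code == 200
    except requests.RequestException:
        # covers connection errors, timeouts, invalid URLs, etc.
        return False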

You can use httplib:

import httplib
conn = httplib.HTTPConnection("www.python.org")
conn.request("HEAD", "/")
r1 = conn.getresponse()
print r1.status, r1.reason

prints

200 OK

Of course, only if www.python.org is up.
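
On Python 3, httplib is named http.client; the same HEAD check translates directly:

from http.client import HTTPConnection

conn = HTTPConnection("www.python.org")
conn.request("HEAD", "/")
r1 = conn.getresponse()
print(r1.status, r1.reason)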

Evan Fosmark
import httplib
import socket
import re

def is_website_online(host):
    """ This function checks whether a host name has a DNS entry by asking
        for its socket info. If the lookup returns anything, we know the
        host is resolvable via DNS.
    """
    try:
        socket.gethostbyname(host)
    except socket.gaierror:
        return False
    else:
        return True


def is_page_available(host, path="/"):
    """ This function retrieves the status code of a website by requesting
        HEAD data from the host. This means that it only requests the headers.
        If the host cannot be reached or something else goes wrong, it returns
        False.
    """
    try:
        conn = httplib.HTTPConnection(host)
        conn.request("HEAD", path)
        # treat any 2xx or 3xx status as available
        return bool(re.match(r"^[23]\d\d$", str(conn.getresponse().status)))
    except StandardError:
        return False
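
For example (the host name is illustrative):

print(is_website_online("www.python.org"))   # True if the name resolves in DNS
print(is_page_available("www.python.org"))   # True if HEAD returns a 2xx/3xx status
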
from urllib.request import Request, urlopen
from urllib.error import URLError, HTTPError
req = Request("http://stackoverflow.com")
try:
    response = urlopen(req)
except HTTPError as e:
    print('The server couldn\'t fulfill the request.')
    print('Error code: ', e.code)
except URLError as e:
    print('We failed to reach a server.')
    print('Reason: ', e.reason)
else:
    print('Website is working fine')

Works on Python 3

The HTTPConnection object from the httplib module in the standard library will probably do the trick for you. BTW, if you start doing anything advanced with HTTP in Python, be sure to check out httplib2; it's a great library.

If the server is down, urllib on Python 2.7 x86 Windows has no timeout and the program deadlocks. So use urllib2:

import urllib2
import socket

def check_url(url, timeout=5):
    try:
        return urllib2.urlopen(url, timeout=timeout).getcode() == 200
    except urllib2.URLError:
        return False
    except socket.timeout:
        return False


print check_url("http://google.fr")  #True 
print check_url("http://notexist.kc") #False     

If by up, you simply mean "the server is serving", then you could use cURL, and if you get a response then it's up.

I can't give you specific advice because I'm not a Python programmer; however, here is a link to PycURL: http://pycurl.sourceforge.net/.

Hi, these functions can run an up/down and speed test for your web page:

from urllib.request import urlopen
from socket import socket
import time


def tcp_test(server_info):
    # server_info is a "host:port" string
    cpos = server_info.find(':')
    try:
        sock = socket()
        sock.connect((server_info[:cpos], int(server_info[cpos+1:])))
        sock.close()
        return True
    except Exception:
        return False


def http_test(server_info):
    try:
        # TODO: we could use this data later to check whether sub-URLs are up or down
        startTime = time.time()
        data = urlopen(server_info).read()
        endTime = time.time()
        speed = endTime - startTime
        return {'status': 'up', 'speed': str(speed)}
    except Exception:
        return {'status': 'down', 'speed': str(-1)}


def server_test(test_type, server_info):
    if test_type.lower() == 'tcp':
        return tcp_test(server_info)
    elif test_type.lower() == 'http':
        return http_test(server_info)
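
Example usage (the host and URL are placeholders):

print(server_test('tcp', 'www.python.org:80'))       # True if the TCP connection succeeds
print(server_test('http', 'http://www.python.org'))  # e.g. {'status': 'up', 'speed': '0.42'}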

You may use the requests library to check whether a website is up, i.e. whether it returns status code 200:

import requests
url = "https://www.google.com"
page = requests.get(url)
print(page.status_code)

200

Here's my solution using PycURL and validators

import pycurl, validators


def url_exists(url):
    """
    Check if the given URL really exists
    :param url: str
    :return: bool
    """
    if validators.url(url):
        c = pycurl.Curl()
        c.setopt(pycurl.NOBODY, True)
        c.setopt(pycurl.FOLLOWLOCATION, False)
        c.setopt(pycurl.CONNECTTIMEOUT, 10)
        c.setopt(pycurl.TIMEOUT, 10)
        c.setopt(pycurl.COOKIEFILE, '')
        c.setopt(pycurl.URL, url)
        try:
            c.perform()
            response_code = c.getinfo(pycurl.RESPONSE_CODE)
            c.close()
            return response_code < 400
        except pycurl.error as err:
            errno, errstr = err.args
            raise OSError('An error occurred: {}'.format(errstr))
    else:
        raise ValueError('"{}" is not a valid url'.format(url))
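
Usage, with an example URL:

print(url_exists("https://www.google.com"))  # True when the response code is below 400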

Requests and httplib2 are great options:

# Using requests (wrapped in a function so the bare `return` statements are
# valid; the function names are illustrative).
import requests

def site_is_up(url):
    request = requests.get(url)
    if request.status_code == 200:
        return True
    return False

# Using httplib2.
import httplib2

def site_is_up_head(url):
    try:
        http = httplib2.Http()
        response = http.request(url, 'HEAD')

        if int(response[0]['status']) == 200:
            return True
    except Exception:
        pass
    return False

If using Ansible, you can use the fetch_url function:

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url

module = AnsibleModule(
    dict(),
    supports_check_mode=True)

# wrapped in a function (the name is illustrative) so the `return` statements are valid
def site_is_up(url):
    try:
        response, info = fetch_url(module, url)
        if info['status'] == 200:
            return True

    except Exception:
        pass

    return False