Eventlet Examples

These are short examples demonstrating the use of Eventlet. They are also included in the examples directory of the source.

== echo server ==

This is a simple server that listens on port 6000 and echoes back every input line it receives. Connect to it with <code>telnet localhost 6000</code>. Terminate your connection by quitting telnet (typically Ctrl-] and then 'quit').

<syntaxhighlight lang="python">from eventlet import api

def handle_socket(client):
    print "client connected"
    fp = client.makefile()
    while True:
        # pass through every non-eof line
        x = fp.readline()
        if not x: break
        fp.write(x)
        fp.flush()  # flush so the echo is sent rather than held in the file buffer
        print "echoed", x
    print "client disconnected"

# server socket listening on port 6000
server = api.tcp_listener(('0.0.0.0', 6000))
while True:
    new_sock, address = server.accept()
    # handle every new connection with a new coroutine
    api.spawn(handle_socket, new_sock)

server.close()</syntaxhighlight>
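
If telnet isn't handy, a short client script can exercise the server just as well. Below is a minimal sketch using only the standard library; the host, port, and test lines are assumptions chosen to match the example above.

<syntaxhighlight lang="python">import socket

# hypothetical test client for the echo server above; run it in a
# second shell while the server is listening
conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
conn.connect(('127.0.0.1', 6000))
for line in ('hello\n', 'world\n'):
    conn.sendall(line)                           # send one line
    print "got back:", conn.recv(1024).rstrip()  # read the echo
conn.close()</syntaxhighlight>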

== web crawler ==

This is a simple web "crawler" that fetches a bunch of urls using a coroutine pool. It has as much concurrency (i.e. pages being fetched simultaneously) as there are coroutines in the pool.

urls = ["http://www.google.com/intl/en_ALL/images/logo.gif",
        "http://wiki.secondlife.com/w/images/secondlife.jpg",
        "http://us.i1.yimg.com/us.yimg.com/i/ww/beta/y3.gif"]

import time
from eventlet import coros, httpc, util
 
# replace socket with a cooperative coroutine socket because httpc
# uses httplib, which uses socket.  Removing this serializes the http
# requests, because the standard socket is blocking.
util.wrap_socket_with_coroutine_socket()

def fetch(url):
    # we could do something interesting with the result, but this is
    # example code, so we'll just report that we did it
    print "%s fetching %s" % (time.asctime(), url)
    httpc.get(url)
    print "%s fetched %s" % (time.asctime(), url)

pool = coros.CoroutinePool(max_size=4)
waiters = []
for url in urls:
    waiters.append(pool.execute(fetch, url))

# wait for all the coroutines to come back before exiting the process
for waiter in waiters:
    waiter.wait()</syntaxhighlight>
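
One hypothetical way to observe that concurrency is to time the whole crawl: with <code>max_size=4</code> and three urls, every fetch overlaps, so the elapsed time should be close to that of the slowest single fetch rather than the sum of all three. A sketch, reusing only the names defined above:

<syntaxhighlight lang="python"># time the same crawl to see the fetches overlapping
start = time.time()
waiters = [pool.execute(fetch, url) for url in urls]
for waiter in waiters:
    waiter.wait()
print "crawled %d urls in %.2f seconds" % (len(urls), time.time() - start)</syntaxhighlight>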