Examples

These are short examples demonstrating the use of Eventlet. They are also included in the examples directory of the source.

echo server

This is a simple server that listens on port 6000 and echoes back every input line it receives. Connect to it with <code>telnet localhost 6000</code>. Terminate the connection by quitting telnet (typically Ctrl-] and then 'quit').

<python>from eventlet import api

def handle_socket(client):
    print "client connected"
    while True:
        # pass through every non-eof line
        x = client.readline()
        if not x: break
        client.write(x)
        print "echoed", x
    print "client disconnected"

# server socket listening on port 6000
server = api.tcp_listener(('0.0.0.0', 6000))
while True:
    new_sock, address = server.accept()
    # handle every new connection with a new coroutine
    api.spawn(handle_socket, new_sock)

server.close()</python>
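
If telnet isn't handy, a minimal client sketch using the standard library's blocking socket module works for a quick test. This is not part of the original example, and it assumes the server above is already running on localhost:

<python>import socket

# connect to the echo server started above (assumes it is running locally)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(('127.0.0.1', 6000))
# send one newline-terminated line so the server's readline() returns
sock.sendall('hello\n')
print "received:", sock.recv(1024)
sock.close()</python>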

web crawler

This is a simple web "crawler" that fetches a batch of URLs using a coroutine pool. Its concurrency (the number of pages being fetched simultaneously) is bounded by the number of coroutines in the pool.

<python>urls = ["http://www.google.com/intl/en_ALL/images/logo.gif",
        "http://wiki.secondlife.com/w/images/secondlife.jpg",
        "http://us.i1.yimg.com/us.yimg.com/i/ww/beta/y3.gif"]

import time
from eventlet import coros, httpc, util

# replace socket with a cooperative coroutine socket because httpc
# uses httplib, which uses socket.  Removing this serializes the http
# requests, because the standard socket is blocking.
util.wrap_socket_with_coroutine_socket()

def fetch(url):
    # we could do something interesting with the result, but this is
    # example code, so we'll just report that we did it
    print "%s fetching %s" % (time.asctime(), url)
    httpc.get(url)
    print "%s fetched %s" % (time.asctime(), url)

pool = coros.CoroutinePool(max_size=4)
waiters = []
for url in urls:
    waiters.append(pool.execute(fetch, url))

# wait for all the coroutines to come back before exiting the process
for waiter in waiters:
    waiter.wait()</python>
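
Each waiter can also hand back its coroutine's return value. Building on the crawler above, here is a sketch of collecting results, assuming (as this era of eventlet appeared to do) that the event returned by pool.execute() yields the function's return value from wait(), and that httpc.get() returns the response body as a string:

<python>def fetch_body(url):
    # hypothetical variant of fetch that returns the response body
    return httpc.get(url)

# pair each url with its waiter so results can be reported per-url
waiters = [(url, pool.execute(fetch_body, url)) for url in urls]
for url, waiter in waiters:
    body = waiter.wait()  # assumed to return fetch_body's return value
    print "%s returned %d bytes" % (url, len(body))</python>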