From abc66171eb3e736c1edc31dc212c1d8bb87a00b2 Mon Sep 17 00:00:00 2001 From: Vincent Driessen Date: Mon, 13 Feb 2012 13:57:36 +0100 Subject: [PATCH] Simplify the count_words_at_url example. The long implementation diverted attention a bit from the actual point of enqueuing here. --- README.md | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 7cac659..a7123b6 100644 --- a/README.md +++ b/README.md @@ -15,17 +15,11 @@ First, run a Redis server, of course: To put jobs on queues, you don't have to do anything special, just define your typically lengthy or blocking function: - import urllib2 + import requests def count_words_at_url(url): - f = urllib2.urlopen(url) - count = 0 - while True: - line = f.readline() - if not line: - break - count += len(line.split()) - return count + resp = requests.get(url) + return len(resp.text.split()) Then, create a RQ queue: