4
4
import irc3
5
5
import datetime
6
6
from irc3 .compat import asyncio
7
- from concurrent .futures import ThreadPoolExecutor
8
7
9
8
__doc__ = '''
10
9
==========================================
22
21
irc3.plugins.feeds
23
22
24
23
[irc3.plugins.feeds]
25
- channels = #irc3 # global channel to notify
26
- delay = 5 # delay to check feeds
27
- directory = ~/.irc3/feeds # directory to store feeds
24
+ channels = #irc3 # global channel to notify
25
+ delay = 5 # delay to check feeds in minutes
26
+ directory = ~/.irc3/feeds # directory to store feeds
28
27
hook = irc3.plugins.feeds.default_hook # dotted name to a callable
29
28
fmt = [{name}] {entry.title} - {entry.link} # formatter
30
29
34
33
github/irc3.fmt = [{feed.name}] New commit: {entry.title} - {entry.link}
35
34
# custom channels
36
35
github/irc3.channels = #irc3dev #irc3
37
- # custom delay
36
+ # custom delay in minutes
38
37
github/irc3.delay = 10
39
38
40
39
Hook is a dotted name referring to a callable (function or class) which takes a
63
62
64
63
'''
65
64
65
# HTTP headers sent with every feed request. Cache-Control/Pragma ask
# intermediaries not to serve a stale cached copy of the feed.
HEADERS = {
    'User-Agent': 'python-aiohttp/irc3/feeds',
    'Cache-Control': 'max-age=0',
    'Pragma': 'no-cache',
}
70
+
66
71
67
72
def default_hook (entries ):
68
73
"""Default hook called for each entry"""
@@ -76,21 +81,6 @@ def dispatcher(messages):
76
81
return dispatcher
77
82
78
83
79
- def fetch (args ):
80
- """fetch a feed"""
81
- session = args ['session' ]
82
- for feed , filename in zip (args ['feeds' ], args ['filenames' ]):
83
- try :
84
- resp = session .get (feed , timeout = 5 )
85
- content = resp .content
86
- except Exception : # pragma: no cover
87
- pass
88
- else :
89
- with open (filename , 'wb' ) as fd :
90
- fd .write (content )
91
- return args ['name' ]
92
-
93
-
94
84
ISO_FORMAT = "%Y-%m-%dT%H:%M:%S"
95
85
96
86
@@ -102,7 +92,7 @@ def parse(feedparser, args):
102
92
103
93
for filename in args ['filenames' ]:
104
94
try :
105
- with open (filename + '.updated' ) as fd :
95
+ with open (filename + '.updated' , encoding = "UTF-8" ) as fd :
106
96
updated = datetime .datetime .strptime (
107
97
fd .read ()[:len ("YYYY-MM-DDTHH:MM:SS" )], ISO_FORMAT
108
98
)
@@ -140,14 +130,6 @@ def parse(feedparser, args):
140
130
class Feeds :
141
131
"""Feeds plugin"""
142
132
143
- PoolExecutor = ThreadPoolExecutor
144
-
145
- headers = {
146
- 'User-Agent' : 'python-requests/irc3/feeds' ,
147
- 'Cache-Control' : 'max-age=0' ,
148
- 'Pragma' : 'no-cache' ,
149
- }
150
-
151
133
def __init__ (self , bot ):
152
134
bot .feeds = self
153
135
self .bot = bot
@@ -177,7 +159,6 @@ def __init__(self, bot):
177
159
fmt = config .get ('fmt' , '[{feed.name}] {entry.title} {entry.link}' ),
178
160
delay = delay ,
179
161
channels = config .get ('channels' , '' ),
180
- headers = self .headers ,
181
162
time = 0 ,
182
163
)
183
164
@@ -208,7 +189,16 @@ def __init__(self, bot):
208
189
209
190
def connection_made(self):
    """Start the periodic feed-update loop once the bot is connected."""
    # Schedule the polling coroutine on the bot's event loop.
    self.bot.create_task(self.periodically_update())
193
+
194
async def periodically_update(self):
    """Poll the configured feeds forever after the connection is made.

    Sleeps ``self.delay`` seconds between update rounds. Does nothing when
    either optional dependency (aiohttp / feedparser) is unavailable.
    """
    # Guard: imports() leaves these as None when the module is missing.
    if self.aiohttp is None or self.feedparser is None:
        return
    # Short grace period before the first poll.
    await asyncio.sleep(10)
    while True:
        await self.update()
        await asyncio.sleep(self.delay)
212
202
213
203
def imports (self ):
214
204
"""show some warnings if needed"""
@@ -219,15 +209,14 @@ def imports(self):
219
209
self .bot .log .critical ('feedparser is not installed' )
220
210
self .feedparser = None
221
211
try :
222
- import requests
212
+ import aiohttp
223
213
except ImportError : # pragma: no cover
224
- self .bot .log .critical ('requests is not installed' )
225
- self .session = None
214
+ self .bot .log .critical ('aiohttp is not installed' )
215
+ self .aiohttp = None
226
216
else :
227
- self .session = requests .Session ()
228
- self .session .headers .update (self .headers )
217
+ self .aiohttp = aiohttp
229
218
230
- def parse (self , * args ):
219
+ def parse (self ):
231
220
"""parse pre-fetched feeds and notify new entries"""
232
221
entries = []
233
222
for feed in self .feeds .values ():
@@ -239,34 +228,37 @@ def messages():
239
228
if entry :
240
229
feed = entry .feed
241
230
message = feed ['fmt' ].format (feed = feed , entry = entry )
242
- for c in feed ['channels' ]:
243
- yield c , message
231
+ for channel in feed ['channels' ]:
232
+ yield channel , message
244
233
245
234
self .dispatcher (messages ())
246
235
247
- def update_time (self , future ):
248
- name = future .result ()
249
- self .bot .log .debug ('Feed %s fetched' , name )
250
- feed = self .feeds [name ]
251
- feed ['time' ] = time .time ()
252
-
253
async def update(self):
    """Fetch every feed whose refresh delay has elapsed, then parse results.

    Feeds are fetched concurrently within a single aiohttp session; the
    per-request timeout is 5 seconds. Feeds fetched more recently than
    their configured ``delay`` are skipped.
    """
    now = time.time()
    stale = [f for f in self.feeds.values()
             if f['time'] < now - f['delay']]
    if not stale:
        return
    names = ', '.join([f['name'] for f in stale])
    self.bot.log.info('Fetching feeds %s', names)
    timeout = self.aiohttp.ClientTimeout(total=5)
    async with self.aiohttp.ClientSession(timeout=timeout) as session:
        # One task per stale feed, all sharing the session.
        await asyncio.gather(*(self.fetch(f, session) for f in stale))
    self.parse()
252
async def fetch(self, feed, session):
    """Download each URL of *feed* and store the response body on disk.

    :param feed: feed config dict with ``feeds`` (URLs), ``filenames``
                 (cache paths) and ``name`` keys.
    :param session: an open aiohttp ``ClientSession``.

    Errors are logged and skipped (best effort); the feed's last-fetch
    timestamp is refreshed regardless so it is not retried immediately.
    """
    for url, filename in zip(feed['feeds'], feed['filenames']):
        try:
            async with session.get(url, headers=HEADERS) as resp:
                # Fix: raise on HTTP error statuses (404/500/...) so the
                # cached copy is not overwritten with an error page; the
                # error is handled by the existing except clause below.
                resp.raise_for_status()
                with open(filename, 'wb') as file:
                    file.write(await resp.read())
        except Exception:  # pragma: no cover
            self.bot.log.exception(
                "Exception while fetching feed %s", feed['name']
            )
    self.bot.log.debug('Feed %s fetched', feed['name'])
    feed['time'] = time.time()
0 commit comments