3 from . import text as _text
6 class Aggregator (list):
7 r"""An iCalendar feed aggregator
9 Figure out where the example feeds are located, relative to the
10 directory from which you run this doctest (i.e., the project's
14 >>> root_dir = os.curdir
15 >>> data_dir = os.path.abspath(os.path.join(root_dir, 'test', 'data'))
16 >>> base_url = 'file://{}'.format(data_dir.replace(os.sep, '/'))
18 >>> from .feed import Feed
20 You can set processing hooks to analyze and manipulate feeds as
23 >>> processors = [lambda feed: print("I'm processing {!r}".format(feed))]
26 ... prodid='-//pycalendar//NONSGML testing//EN',
28 ... Feed(url='{}/{}'.format(base_url, name))
29 ... for name in ['geohash.ics',]],
30 ... processors=processors,
32 >>> a # doctest: +ELLIPSIS
33 [<Feed url:file://.../test/data/geohash.ics>]
34 >>> a.fetch() # doctest: +ELLIPSIS
35 I'm processing <Feed url:file://.../test/data/geohash.ics>
37 Generate aggregate calendars with the ``.write`` method.
40 >>> stream = io.StringIO()
41 >>> a.write(stream=stream)
42 >>> value = stream.getvalue()
43 >>> value # doctest: +ELLIPSIS
44 'BEGIN:VCALENDAR\r\nVERSION:2.0\r\n...END:VCALENDAR\r\n'
45 >>> print(value.replace('\r\n', '\n'))
48 PRODID:-//pycalendar//NONSGML testing//EN
50 UID:2013-06-30@geohash.invalid
51 DTSTAMP:2013-06-30T00:00:00Z
52 DTSTART;VALUE=DATE:20130630
53 DTEND;VALUE=DATE:20130701
54 SUMMARY:XKCD geohashing\, Boston graticule
55 URL:http://xkcd.com/426/
56 LOCATION:Snow Hill\, Dover\, Massachusetts
57 GEO:42.226663,-71.28676
# NOTE(review): this is a sampled excerpt — the embedded original line
# numbers jump (e.g. 7->9, 81->89), so the class docstring's closing
# quotes and parts of every method body fall on lines not shown here.
# The docstring above is a doctest-driven usage example: construct with
# prodid/feeds/processors, fetch(), then write() an iCalendar VCALENDAR.
# Constructor. Visibly stores `version` and the `processors` hook list;
# the handling of `prodid` and `feeds` happens on unshown lines (orig.
# 64, 66-69) — confirm against the full source before relying on it.
62 def __init__(self, prodid, version='2.0', feeds=None, processors=None):
63 super(Aggregator, self).__init__()
65 self.version = version
70 self.processors = processors
# Fragment of the fetch/processing loop — its enclosing `def` line
# (orig. ~71-74) is not shown. Each configured processor hook is
# invoked, presumably once per fetched feed; verify in the full file.
75 for processor in self.processors:
# Serialize the aggregate to `stream` as a single VCALENDAR, using CRLF
# line endings and escaping the VERSION/PRODID property text via
# `_text.escape` before each property is written.
78 def write(self, stream):
79 stream.write('BEGIN:VCALENDAR\r\n')
80 stream.write('VERSION:{}\r\n'.format(_text.escape(self.version)))
81 stream.write('PRODID:{}\r\n'.format(_text.escape(self.prodid)))
# The loop headers binding `feed` and `key` (orig. 82-88) are not shown;
# below, each matching entry writes itself into the same stream before
# the calendar is closed.
89 for entry in feed.get(key, []):
90 entry.write(stream=stream)
91 stream.write('END:VCALENDAR\r\n')