# -*- coding: iso-8859-1 -*-
"""
MoinMoin - SharedContent macro, based on the FeedReader macro

@copyright: 2008, 2012, 2013 by Paul Boddie <paul@boddie.org.uk>
@license: GNU GPL (v2 or later), see COPYING.txt for details.
"""

from DateSupport import getDateTimeFromISO8601, DateTime
from MoinMoin.Page import Page
from MoinRemoteSupport import *
from MoinSupport import parseMacroArguments
from MoinShare import getUpdateSources, getUpdatesFromPage, \
                      getUpdatesFromStore, formatUpdate, \
                      Update
from email.utils import parsedate
import xml.dom.pulldom

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO

Dependencies = ["time"]

# Default maximum number of updates shown by the macro.

MAX_ENTRIES = 5

# Namespace used to recognise Atom feeds.

ATOM_NS = "http://www.w3.org/2005/Atom"

# Utility functions.

def text(element):

    "Return the concatenated text node content of 'element'."

    nodes = []
    for node in element.childNodes:
        if node.nodeType == node.TEXT_NODE:
            nodes.append(node.nodeValue)
    return "".join(nodes)

def children(element):

    "Return the serialised (XML text) child nodes of 'element'."

    nodes = []
    for node in element.childNodes:
        nodes.append(node.toxml())
    return "".join(nodes)

def unescape(text):

    """
    Replace the basic XML entity references in 'text' with the characters they
    denote, returning the result.
    """

    # Fix: the previous implementation replaced each character with itself
    # (the entity references had been lost). The ampersand entity must be
    # converted last so that entity prefixes are not reinterpreted.

    return text.replace("&lt;", "<").replace("&gt;", ">").replace("&amp;", "&")

def linktext(element, feed_type):

    """
    Return the link for 'element': RSS carries it as text content, Atom as an
    'href' attribute, as indicated by 'feed_type'.
    """

    if feed_type == "rss":
        return text(element)
    else:
        return element.getAttribute("href")

def need_content(show_content, tagname):

    """
    Return whether full content is requested by 'show_content' and provided by
    an element having the given 'tagname'.
    """

    return show_content in ("content", "description") and tagname in ("content", "description")

# Error classes.

class FeedError(Exception):

    "The base class of feed retrieval errors."

    pass

class FeedMissingError(FeedError):

    "A feed could not be retrieved."

    pass

class FeedContentTypeError(FeedError):

    "A feed was retrieved but not provided in a supported content type."

    pass

# Feed retrieval.

def getUpdates(request, feed_url, max_entries, show_content):

    """
    Using the given 'request', retrieve from 'feed_url' up to the given number
    'max_entries' of update entries (no limit is applied if 'max_entries' is
    None). The 'show_content' parameter can indicate
    that a "summary" is to be obtained for each update, that the "content" of
    each update is to be obtained (falling back to a summary if no content is
    provided), or no content (indicated by a false value) is to be obtained.

    A tuple of the form ((feed_type, channel_title, channel_link), updates) is
    returned.

    FeedMissingError is raised if the feed could not be retrieved;
    FeedContentTypeError is raised for unsupported content types.
    """

    feed_updates = []

    # Obtain the resource, using a cached version if appropriate.

    max_cache_age = int(getattr(request.cfg, "moin_share_max_cache_age", "300"))
    data = getCachedResource(request, feed_url, "MoinShare", "wiki", max_cache_age)
    if not data:
        raise FeedMissingError

    # Interpret the cached feed.

    feed = StringIO(data)
    _url, content_type, _encoding, _metadata = getCachedResourceMetadata(feed)

    if content_type not in ("application/atom+xml", "application/rss+xml", "application/xml"):
        raise FeedContentTypeError

    try:
        # Parse each node from the feed.

        channel_title = channel_link = None

        feed_type = None
        update = None
        in_source = False

        events = xml.dom.pulldom.parse(feed)

        for event, value in events:

            # Elements inside Atom source declarations describe the original
            # feed, not this one, and are ignored.

            if not in_source and event == xml.dom.pulldom.START_ELEMENT:
                tagname = value.localName

                # Detect the feed type and items.

                if tagname == "feed" and value.namespaceURI == ATOM_NS:
                    feed_type = "atom"

                elif tagname == "rss":
                    feed_type = "rss"

                # Detect items.

                elif feed_type == "rss" and tagname == "item" or \
                    feed_type == "atom" and tagname == "entry":

                    update = Update()

                # Detect source declarations.

                elif feed_type == "atom" and tagname == "source":
                    in_source = True

                # Handle item elements. Before an item is open, title and link
                # describe the channel itself.

                elif tagname == "title":
                    events.expandNode(value)
                    if update:
                        update.title = text(value)
                    else:
                        channel_title = text(value)

                elif tagname == "link":
                    events.expandNode(value)
                    if update:
                        update.link = linktext(value, feed_type)
                    else:
                        channel_link = linktext(value, feed_type)

                elif show_content and (
                    feed_type == "atom" and tagname in ("content", "summary") or
                    feed_type == "rss" and tagname == "description"):

                    events.expandNode(value)

                    # Obtain content where requested or, failing that, a
                    # summary.

                    if update and (need_content(show_content, tagname) or tagname == "summary" and not update.content):
                        if feed_type == "atom":
                            update.content_type = value.getAttribute("type") or "text"

                            # Normalise the content types and extract the
                            # content.

                            if update.content_type in ("xhtml", "application/xhtml+xml", "application/xml"):
                                update.content = children(value)
                                update.content_type = "application/xhtml+xml"
                            elif update.content_type in ("html", "text/html"):
                                update.content = text(value)
                                update.content_type = "text/html"
                            else:
                                update.content = text(value)
                                update.content_type = "text/plain"
                        else:
                            update.content_type = "text/html"
                            update.content = text(value)

                elif feed_type == "atom" and tagname == "updated" or \
                    feed_type == "rss" and tagname == "pubDate":

                    events.expandNode(value)

                    if update:
                        if feed_type == "atom":
                            value = getDateTimeFromISO8601(text(value))
                        else:
                            value = DateTime(parsedate(text(value)))
                        update.updated = value

            elif event == xml.dom.pulldom.END_ELEMENT:
                tagname = value.localName

                if feed_type == "rss" and tagname == "item" or \
                    feed_type == "atom" and tagname == "entry":

                    feed_updates.append(update)

                    update = None

                elif feed_type == "atom" and tagname == "source":
                    in_source = False

    finally:
        feed.close()

    # Fix: apply the documented per-feed limit, previously accepted but
    # ignored.

    if max_entries is not None:
        feed_updates = feed_updates[:max_entries]

    return (feed_type, channel_title, channel_link), feed_updates

# The macro itself.

def execute(macro, args):

    """
    Execute the 'macro' with the given 'args': any number of "sources"
    arguments naming pages that declare update sources, an optional "show"
    argument requesting "summary" or "content" for each update, and an
    optional "limit" argument giving the maximum number of updates shown.

    Return the formatted output: the updates, followed by feed descriptions,
    followed by any retrieval error messages.
    """

    request = macro.request
    fmt = macro.formatter
    _ = request.getText

    source_pages = []
    show_content = None
    max_entries = None

    for arg, value in parseMacroArguments(args):
        if arg == "sources":
            source_pages.append(value)
        elif arg == "show":
            show_content = value.lower()
        elif arg == "limit":
            try:
                max_entries = int(value)
            except ValueError:
                return fmt.text(_("SharedContent: limit must be set to the maximum number of entries to be shown"))

    if not source_pages:
        return fmt.text(_("SharedContent: at least one sources page must be specified"))

    sources = {}

    for source_page in source_pages:
        sources.update(getUpdateSources(source_page, request))

    if not sources:
        return fmt.text(_("SharedContent: at least one update source must be specified"))

    show_content = show_content or False
    max_entries = max_entries or MAX_ENTRIES

    # Retrieve updates, classifying them as missing or bad and excluding them if
    # appropriate.

    updates = []
    feeds = []
    unspecified = []
    missing = []
    bad_content = []

    for source_name, source_parameters in sources.items():
        location = source_parameters.get("location")
        if not location:
            unspecified.append(source_name)
            continue

        # A per-source "limit" overrides the default feed behaviour.

        try:
            max_entries_for_feed = int(source_parameters["limit"])
        except (KeyError, ValueError):
            max_entries_for_feed = None

        # Retrieve updates from feeds.

        if source_parameters.get("type") == "url":
            try:
                feed_info, feed_updates = getUpdates(request, location, max_entries_for_feed, show_content)
                updates += feed_updates
                feeds.append((location, feed_info))
            except FeedMissingError:
                missing.append(location)
            except FeedContentTypeError:
                bad_content.append(location)

        # Retrieve updates from pages.

        elif source_parameters.get("type") == "page":
            page = Page(request, location)
            updates += getUpdatesFromPage(page, request)

            # Build feed-equivalent information for the update source.

            feeds.append((
                page.url(request, {"action" : "SharedUpdates", "doit" : "1"}), (
                    "internal", _("Updates from page %s") % location,
                    page.url(request)
                    )
                ))

        # Retrieve updates from message stores.

        elif source_parameters.get("type") == "store":
            page = Page(request, location)
            updates += getUpdatesFromStore(page, request)

            # Build feed-equivalent information for the update source.

            feeds.append((
                page.url(request, {"action" : "SharedUpdates", "store" : "1", "doit" : "1"}), (
                    "internal", _("Updates from message store on page %s") % location,
                    page.url(request)
                    )
                ))

    # Prepare the output.

    output = []
    append = output.append

    # Show the updates.

    if not show_content:
        append(fmt.bullet_list(on=1))

    # NOTE: Permit configurable sorting.

    updates.sort()
    updates.reverse()

    # Truncate the number of updates to the maximum number.

    updates = updates[:max_entries]

    for update in updates:

        # Emit content where appropriate.

        if show_content:
            append(fmt.div(on=1, css_class="moinshare-update"))

            if update.author:
                append(fmt.div(on=1, css_class="moinshare-author"))
                append(fmt.text(update.author))
                append(fmt.div(on=0))

            append(formatUpdate(update, request, fmt))

            append(fmt.div(on=1, css_class="moinshare-date"))
            append(fmt.text(str(update.updated)))
            append(fmt.div(on=0))

            append(fmt.div(on=0))

        # Or emit title and link information for items.

        elif update.title and update.link:
            append(fmt.listitem(on=1, css_class="moinshare-update"))
            append(fmt.url(on=1, href=update.link))
            append(fmt.icon('www'))
            append(fmt.text(" " + update.title))
            append(fmt.url(on=0))
            append(fmt.listitem(on=0))

    if not show_content:
        append(fmt.bullet_list(on=0))

    # Show the feeds.

    for feed_url, (feed_type, channel_title, channel_link) in feeds:
        if channel_title and channel_link:
            append(fmt.paragraph(on=1, css_class="moinshare-feed"))
            append(fmt.url(on=1, href=channel_link))
            append(fmt.text(channel_title))
            append(fmt.url(on=0))
            append(fmt.text(" "))
            append(fmt.url(on=1, href=feed_url))
            append(fmt.icon('rss'))
            append(fmt.url(on=0))
            append(fmt.paragraph(on=0))

    # Show errors.

    for feed_url in missing:
        append(fmt.paragraph(on=1, css_class="moinshare-missing-feed-error"))
        append(fmt.text(_("SharedContent: updates could not be retrieved for %s") % feed_url))
        append(fmt.paragraph(on=0))

    for feed_url in bad_content:
        append(fmt.paragraph(on=1, css_class="moinshare-content-type-feed-error"))

        # Fix: previously returned here, discarding all accumulated output on
        # the first bad feed and leaving the closing paragraph unreachable.

        append(fmt.text(_("SharedContent: updates for %s were not provided in Atom or RSS format") % feed_url))
        append(fmt.paragraph(on=0))

    return ''.join(output)

# vim: tabstop=4 expandtab shiftwidth=4