summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorroot <root@wlan-5-141.nay.redhat.com>2011-04-29 17:05:14 +0800
committerroot <root@wlan-5-141.nay.redhat.com>2011-04-29 17:05:14 +0800
commitd465c6fb62eb202f5e2d885ff9a453d9f620b362 (patch)
tree008fa23b3b84d75a06314ed51ff482c86224f647
parent519b8f67671fe3a5d766d34aaeaa6032bf100800 (diff)
downloadrepo-d465c6fb62eb202f5e2d885ff9a453d9f620b362.tar.gz
repo-d465c6fb62eb202f5e2d885ff9a453d9f620b362.tar.xz
repo-d465c6fb62eb202f5e2d885ff9a453d9f620b362.zip
A draft for test case page of wiki converted to nitrate format xml file
-rwxr-xr-xwiki_to_nitrate_xml.py201
1 files changed, 201 insertions, 0 deletions
diff --git a/wiki_to_nitrate_xml.py b/wiki_to_nitrate_xml.py
new file mode 100755
index 0000000..b473eab
--- /dev/null
+++ b/wiki_to_nitrate_xml.py
@@ -0,0 +1,201 @@
+#!/usr/bin/python
+
+import os
+import sys
+import optparse
+import re
+import time, datetime
+import xml.etree.ElementTree as ET
+import xml.dom.minidom
+
# simplemediawiki provides the MediaWiki API client used throughout this
# script; fail early with an actionable hint (correct package name is
# python-simplemediawiki) instead of a bare ImportError traceback.
try:
    from simplemediawiki import MediaWiki
except ImportError:
    print("Unable to import simplemediawiki. Is python-simplemediawiki installed?")
    sys.exit(1)
+
def parse_args():
    '''Build the option parser, parse sys.argv and validate the chosen action.

    Returns a ``(opts, action)`` tuple where *action* is the first
    positional argument.  Exits via ``parser.error`` on invalid input.
    '''
    parser = optparse.OptionParser(usage="%prog [options] <action> [options]")
    parser.add_option('-v', '--verbose', action='store_true', default=False,
                      help='Enable more verbose output')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='Enable debugging output')
    parser.add_option('--url', action='store', default='https://fedoraproject.org/w/api.php',
                      help='API URL')

    # Options shared by every action.
    general = optparse.OptionGroup(parser, "General options")
    general.add_option('-l', '--limit', action='store', default=5, type="int",
                       help='Limit recursion depth (%default)')
    parser.add_option_group(general)

    # Options consumed only by the 'categorymembers' action.
    members = optparse.OptionGroup(parser, "Options for 'categorymembers' command:")
    members.add_option('-c', '--category', dest="categories",
                       default=[], action="append",
                       help='Wiki category name to query (accepts multiple values) - Not Finished Yet!')
    parser.add_option_group(members)

    # Options consumed only by the 'migration' action.
    migration = optparse.OptionGroup(parser, "Options for 'migration':")
    migration.add_option('-t', '--title',
                         default='', action='store',
                         help='test case page title to convert to Nitrate xml file')
    parser.add_option_group(migration)

    opts, args = parser.parse_args()

    if not args:
        parser.error("No action specified")
    action = args[0]

    # Per-action validation of required options.
    if action == 'categorymembers' and not opts.categories:
        parser.error("Must specify at least one category (-c|--category)")
    if action == 'migration' and not opts.title:
        parser.error("Must specify a page (-t|--title)")

    return (opts, action)
+
+
def parse(wiki, page):
    '''Fetch and return the wiki text of *page* via the MediaWiki API.

    Returns the raw markup of the latest revision, or an empty string when
    the page does not exist or has no revisions.  Relies on the
    module-level ``opts`` for the debug flag.
    '''
    # Build query arguments and call wiki
    query = dict(action='query',
                 prop='revisions',
                 titles=page,
                 rvprop='content')
    if opts.debug:
        print(query)
    response = wiki.call(query)
    # The API keys pages by page-id; since only one title was requested,
    # the first revision of the first page is the content we want.
    # (loop variable renamed so it no longer shadows the 'page' parameter)
    for page_data in response.get('query', {}).get('pages', {}).values():
        for rev in page_data.get('revisions', []):
            return rev.get('*', '')
    return ''
+
+
def list_categorymembers(wiki, cat_page, limit=5):
    '''Return a list of page titles belonging to a category page.

    Sub-categories are expanded recursively at most *limit* levels deep;
    the category pages themselves are removed from the result.  Relies on
    the module-level ``opts`` for the debug flag.
    '''
    # Add 'Category:' prefix if not given
    if not cat_page.startswith("Category:"):
        cat_page = "Category:%s" % cat_page

    # Build query arguments and call wiki
    query = dict(action='query',
                 list='categorymembers',
                 cmtitle=cat_page)
    if opts.debug:
        print(query)
    response = wiki.call(query)

    members = [entry.get('title')
               for entry in response.get('query', {}).get('categorymembers', {})
               if 'title' in entry]

    # Expand sub-categories in place.  pop(idx) -- rather than
    # remove-by-value -- guarantees we delete the entry just expanded,
    # even if an identical title appears earlier in the list.
    idx = 0
    while idx < len(members) and limit > 0:
        if members[idx].startswith('Category:'):
            members.extend(list_categorymembers(wiki, members[idx], limit - 1))
            members.pop(idx)  # remove Category from list
        else:
            idx += 1

    return members
+
def extract(s, titles):
    '''Split a wiki test-case template into a dict of Nitrate fields.

    *s* is the raw wiki markup of a page using the QA test-case template
    (``|description=`` / optional ``|setup=`` / ``|actions=`` /
    ``|results=`` ... ``}}``) followed by ``[[Category:...]]`` links.
    Returns a dict with keys 'title', 'description', 'setup', 'actions',
    'results' and 'tag' (list of category names).
    '''
    w_dsc = s.find('|description=')
    w_setup = s.find('|setup=')
    w_action = s.find('|actions=')
    w_result = s.find('|results=')
    w_resultend = s.find('}}')

    # Category links live after the closing '}}'.  Match them explicitly
    # instead of slicing a fixed prefix off every '[[...]]' pair, so a
    # stray non-category link (or an unmatched ']]') cannot be mangled
    # into a bogus tag.
    tag = re.findall(r'\[\[Category:(.*?)\]\]', s[w_resultend + 1:])

    table = {}
    table['title'] = titles
    if w_setup == -1:
        # |setup= is optional; description then runs up to |actions=.
        table['description'] = s[(w_dsc + len('|description=')):w_action]
        table['setup'] = ''
    else:
        table['description'] = s[(w_dsc + len('|description=')):w_setup]
        table['setup'] = s[(w_setup + len('|setup=')):w_action]
    table['actions'] = s[(w_action + len('|actions=')):w_result]
    table['results'] = s[(w_result + len('|results=')):w_resultend]
    table['tag'] = tag
    return table
+
def nitratexml(table):
    '''Render *table* (as returned by extract()) as Nitrate/Testopia XML.

    Writes the document to "output.xml" in the current directory and
    returns a pretty-printed string of the same XML.
    '''
    root = ET.Element("testopia")
    root.attrib["version"] = "1.1"
    case = ET.SubElement(root, "testcase")

    ET.SubElement(case, "summary").text = table['title']
    ET.SubElement(case, "categoryname")
    ET.SubElement(case, "defaulttester")
    ET.SubElement(case, "notes").text = table['description']

    plan = ET.SubElement(case, "testplan_reference")
    plan.attrib["type"] = "xml_description"
    plan.text = "Fedora 15 Install Test Plan"

    ET.SubElement(case, "action").text = table['actions']
    ET.SubElement(case, "expectedresults").text = table['results']
    ET.SubElement(case, "setup").text = table['setup']
    ET.SubElement(case, "breakdown")

    # A page with no [[Category:...]] links yields an empty tag list;
    # guard the lookup so we emit an empty <tag/> instead of raising
    # IndexError.  Only the first category is exported, as before.
    tag = ET.SubElement(case, "tag")
    if table['tag']:
        tag.text = table['tag'][0]

    tree = ET.ElementTree(root)
    tree.write("output.xml", encoding="UTF-8", xml_declaration=True)
    xml_dom = xml.dom.minidom.parseString(ET.tostring(root))
    return xml_dom.toprettyxml()
+
+if __name__ == "__main__":
+ (opts,action) = parse_args()
+
+ # Create mediawiki handle
+ wiki = MediaWiki(opts.url)
+
+ if action == 'categorymembers':
+ for cat_page in opts.categories:
+ pages = list_categorymembers(wiki, cat_page, opts.limit)
+ if pages:
+ print "\n".join(pages)
+ else:
+ print "No data found for '%s'" % cat_page
+
+ elif action == 'migration':
+ s = parse(wiki, opts.title)
+ table = extract(s, opts.title)
+ if opts.debug:
+ for key in table.keys():
+ print key, '\t', table[key]
+ pretty_xml_print = nitratexml(table)
+ print pretty_xml_print
+ else:
+ print "Unknown action requested '%s'" % action
+