1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
|
#!/usr/bin/python
import os
import sys
import optparse
import re
import time, datetime
import xml.etree.ElementTree as ET
import xml.dom.minidom
from BeautifulSoup import BeautifulSoup
try:
from simplemediawiki import MediaWiki
except ImportError:
print "Unable to import simplemediawiki. Is python-simpemediawiki installed?"
sys.exit(1)
def parse_args():
    '''Build the option parser, validate the command line and return
    a (opts, action) tuple.  Exits via parser.error() on bad input.'''
    parser = optparse.OptionParser(usage="%prog [options] <action> [options]")
    parser.add_option('-v', '--verbose', action='store_true', default=False,
                      help='Enable more verbose output')
    parser.add_option('-d', '--debug', action='store_true', default=False,
                      help='Enable debugging output')
    parser.add_option('--url', action='store', default='https://fedoraproject.org/w/api.php',
                      help='API URL')

    # General options
    general_group = optparse.OptionGroup(parser, "General options")
    general_group.add_option('-l', '--limit', action='store', default=5, type="int",
                             help='Limit recursion depth (%default)')
    parser.add_option_group(general_group)

    # Options for the 'categorymembers' action
    category_group = optparse.OptionGroup(parser, "Options for 'categorymembers' command:")
    category_group.add_option('-c', '--category', dest="categories",
                              default=[], action="append",
                              help='Wiki category name to query (accepts multiple values) - Not Finished Yet!')
    parser.add_option_group(category_group)

    # Options for the 'migration' action
    migration_group = optparse.OptionGroup(parser, "Options for 'migration':")
    migration_group.add_option('-t', '--title',
                               default='', action='store',
                               help='test case page title to convert to Nitrate xml file')
    parser.add_option_group(migration_group)

    (opts, args) = parser.parse_args()

    # An action word is mandatory; parser.error() never returns
    if not args:
        parser.error("No action specified")
    action = args[0]

    # Per-action argument validation
    if action == 'categorymembers' and not opts.categories:
        parser.error("Must specify at least one category (-c|--category)")
    if action == 'migration' and opts.title == '':
        parser.error("Must specify a page (-t|--title)")

    return (opts, action)
def parse(wiki, page):
    '''Ask the wiki API to parse *page*; return the 'parse' payload dict
    (empty dict when the response has no such key).'''
    query = {'action': 'parse', 'page': page}
    if opts.debug:
        print(query)
    response = wiki.call(query)
    if opts.debug:
        print(response)
    return response.get('parse', {})
def list_categorymembers(wiki, cat_page, limit=5):
    '''Return a list of page titles belonging to category *cat_page*.

    Sub-categories are expanded recursively up to *limit* levels deep and
    the 'Category:' entries themselves are dropped from the result.
    '''
    # Add 'Category:' prefix if not given
    if not cat_page.startswith("Category:"):
        cat_page = "Category:%s" % cat_page

    # Build query arguments and call wiki
    query = dict(action='query',
                 list='categorymembers',
                 cmtitle=cat_page)
    if opts.debug:
        print(query)
    response = wiki.call(query)
    # 'categorymembers' is a list of dicts, so default to [] not {};
    # 'in' replaces dict.has_key (removed in python3)
    members = [entry.get('title')
               for entry in response.get('query', {}).get('categorymembers', [])
               if 'title' in entry]

    # Recurse into sub-category entries while the depth budget lasts
    idx = 0
    while idx < len(members) and limit > 0:
        if members[idx].startswith('Category:'):
            members.extend(list_categorymembers(wiki, members[idx], limit - 1))
            # Drop the Category: entry at this index; list.remove() would
            # delete the first *equal* element, which is wrong on duplicates
            del members[idx]
        else:
            idx += 1
    return members
def extract_html(string, titles):
    '''Extract test case sections from a parsed wiki page into a dict.

    *string* is the 'parse' payload returned by parse(); *titles* is the
    page title.  Returns a dict with keys: title, description, setup,
    actions, results, tag.  Missing sections yield ''.
    '''
    s_text = string.get('text', {}).get('*', '')
    s_tag = string.get('categories', {})
    tag = []
    for t in s_tag:
        tag.append(t.get('*', ''))
    soup = BeautifulSoup(''.join(s_text))

    def section(anchor_id, next_tag):
        # Return the element that follows the section heading, or '' when
        # the section is absent.  Previously only Description and Setup
        # were guarded; pages without How_to_test or Expected_Results
        # crashed with AttributeError on None.
        heading = soup.find(id=anchor_id)
        if heading is None:
            return ''
        return heading.findNext(next_tag)

    table = {}
    table['title'] = titles
    table['description'] = section('Description', 'p')
    table['setup'] = section('Setup', 'ol')
    table['actions'] = section('How_to_test', 'ol')
    table['results'] = section('Expected_Results', 'ol')
    table['tag'] = tag
    return table
def nitratexml(table):
    '''Generate Nitrate/Testopia XML from a wiki test case dict.

    Writes the document to ./output.xml (side effect) and returns a
    pretty-printed string of the same XML.
    '''
    root = ET.Element("testopia")
    root.attrib["version"] = "1.1"
    case = ET.SubElement(root, "testcase")
    # NOTE: author/priority/status and the test plan reference are
    # hard-coded for the Fedora 15 install test migration.
    case.attrib["author"] = "rhe@redhat.com"
    case.attrib["priority"] = "P1"
    case.attrib["automated"] = ""
    case.attrib["status"] = "PROPOSED"
    summary = ET.SubElement(case, "summary")
    summary.text = table['title']
    category = ET.SubElement(case, "categoryname")
    category.text = "default"
    ET.SubElement(case, "defaulttester")
    notes = ET.SubElement(case, "notes")
    notes.text = str(table['description'])
    plan = ET.SubElement(case, "testplan_reference")
    plan.attrib["type"] = "xml_description"
    plan.text = "Fedora 15 Install Test Plan"
    action = ET.SubElement(case, "action")
    action.text = str(table['actions'])
    results = ET.SubElement(case, "expectedresults")
    results.text = str(table['results'])
    setup = ET.SubElement(case, "setup")
    setup.text = str(table['setup'])
    ET.SubElement(case, "breakdown")
    tag = ET.SubElement(case, "tag")
    # Guard against pages with no category tags (was IndexError on [0])
    tag.text = str(table['tag'][0]) if table['tag'] else ''
    tree = ET.ElementTree(root)
    tree.write("output.xml", encoding="UTF-8", xml_declaration=True)
    xml_dom = xml.dom.minidom.parseString(ET.tostring(root))
    return xml_dom.toprettyxml()
if __name__ == "__main__":
(opts,action) = parse_args()
# Create mediawiki handle
wiki = MediaWiki(opts.url)
if action == 'categorymembers':
for cat_page in opts.categories:
pages = list_categorymembers(wiki, cat_page, opts.limit)
if pages:
print "\n".join(pages)
else:
print "No data found for '%s'" % cat_page
elif action == 'migration':
string = parse(wiki, opts.title)
table = extract_html(string, opts.title)
if opts.debug:
for key in table.keys():
print key, '\t', table[key]
pretty_xml_print = nitratexml(table)
print pretty_xml_print, '\n', '\"The xml file named output.xml is generated at ./\"'
else:
print "Unknown action requested '%s'" % action
|