Updated CRJW to support group permissions settings and a simple permissions framework (see the conf/converter.dokuwiki.properties spaceperms properties for an example). Dokuwiki: improved escaping; support for page history; new "load as ancestors" page-history framework, which works better with file systems where ancestor pages use a different naming convention from the current page, and is potentially more graceful with hierarchies; Blog Link Report available to make manually updating links to pages that became blogs easier; improved hierarchy.
diff --git a/.gitignore b/.gitignore
index 3209c80..2443c87 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@
*.log
sampleData/*/*Output*
output
+tmp/
diff --git a/build.xml b/build.xml
index 6f22411..8f1feeb 100644
--- a/build.xml
+++ b/build.xml
@@ -235,7 +235,7 @@
<zipfileset src="${module.uwc.basedir}/lib/commons-httpclient-3.0.jar"/>
<zipfileset src="${module.uwc.basedir}/lib/commons-lang-2.1.jar"/>
<zipfileset src="${module.uwc.basedir}/lib/commons-logging-1.0.4.jar"/>
- <zipfileset src="${module.uwc.basedir}/lib/confluence-xmlrpc-wrapper-v5.1.0.jar"/>
+ <zipfileset src="${module.uwc.basedir}/lib/confluence-xmlrpc-wrapper-v5.2.0.jar"/>
<zipfileset src="${module.uwc.basedir}/lib/dom4j-1.4-full.jar"/>
<zipfileset src="${module.uwc.basedir}/lib/forms_rt.jar"/>
<zipfileset src="${module.uwc.basedir}/lib/jakarta-oro-2.0.8.jar"/>
diff --git a/conf/converter.dokuwiki.properties b/conf/converter.dokuwiki.properties
index 3d2b22b..894cad4 100644
--- a/conf/converter.dokuwiki.properties
+++ b/conf/converter.dokuwiki.properties
@@ -53,6 +53,22 @@
#DokuWiki.001.spacename-rule-prefix.property=
#DokuWiki.001.spacename-rule-uppercase.property=
+## Some optional space permission creation rules
+## Change group to the target groupname. If the group doesn't exist, it will
+## be created. The permissions should be a comma delimited list of permission
+## strings as defined under the Space Permissions section here:
+## https://developer.atlassian.com/display/CONFDEV/Confluence+XML-RPC+and+SOAP+APIs
+# Everything but admin
+DokuWiki.001.spaceperms.property={groupname}group{permissions}VIEWSPACE,EDITSPACE,EXPORTPAGE,SETPAGEPERMISSIONS,REMOVEPAGE,EDITBLOG,REMOVEBLOG,COMMENT,REMOVECOMMENT,CREATEATTACHMENT,REMOVEATTACHMENT,REMOVEMAIL,EXPORTSPACE
+# read-only
+#DokuWiki.001.spaceperms.property={groupname}group{permissions}VIEWSPACE
+## When set to true, spaceperms-addgroup will add groups represented by
+## the spaceperms property. Default value is true.
+#DokuWiki.001.spaceperms-addgroup.property=false
+
+
+
+
## Set the attachment directory value to a :: delimited list of media directories. It should
## correspond to your pages setting, but with "media" instead of "pages" as the penultimate directory
#DokuWiki.001.attachmentdirectory.property=/Absolute/path/to/media/dir::/Absolute/path/to/media/foo
@@ -67,7 +83,8 @@
# We redo this so that the results of leadingspaces are tokenized
DokuWiki.004.code.java-regex-tokenizer=(?s)<code>(.*?)<\/code>{replace-with}{code}$1{code}
DokuWiki.009.esc-lbrackets.java-regex=(?<!\[)\[(?!\[){replace-with}\\[
-DokuWiki.009.esc-lcurlybrace.java-regex=(?<!\{)\{(?!\{){replace-with}\\{
+DokuWiki.009.esc-lcurlybrace1.java-regex=\\\{(?!\{){replace-with}\\\\{
+DokuWiki.009.esc-lcurlybrace2.java-regex=(?<!\{\\)\{(?!\{){replace-with}\\{
## Tag Plugin - https://www.dokuwiki.org/plugin:tag
DokuWiki.010.tags.class=com.atlassian.uwc.converters.dokuwiki.TagConverter
## Blog Macro - we need to tokenize this to save from other handling
@@ -119,7 +136,7 @@
# Tables -- must be run before the image and link converters
DokuWiki.21.prep-colspans.class=com.atlassian.uwc.converters.dokuwiki.PrepColSpansConverter
-DokuWiki.22.table1.perl=s/\^/||/g
+DokuWiki.23.table1.perl=s/\^/||/g
# Interwiki links
# Match [...|doku>...]
@@ -198,7 +215,16 @@
#DokuWiki.7.meta-dir.property=/Set/This/To/Meta/Directory
#DokuWiki.7.users-must-exist.property=false
#DokuWiki.7.userdata.class=com.atlassian.uwc.converters.dokuwiki.DokuwikiUserDate
-
+## (OPTIONAL) Page History
+## To preserve history, uncomment these properties and set the ancestor-dir to
+## your dokuwiki attic directory
+#DokuWiki.8000.switch.page-history-preservation=true
+#DokuWiki.8000.suffix.page-history-preservation=[.]#[.]txt.gz
+#DokuWiki.8000.page-history-load-as-ancestors.property=true
+#DokuWiki.8000.page-history-load-as-ancestors-dir.property=/Set/This/To/Attic/Directory
+#DokuWiki.8000.page-history-load-as-ancestors-isgzip.property=true
+#DokuWiki.8000.page-history-sortwithtimestamp.property=true
+#DokuWiki.8000.page-history-load-as-ancestors-lastiscurrent.property=true
# Detokenize (code blocks)
@@ -216,8 +242,11 @@
## Handle direct to confluence xhtml transformations
DokuWiki.952.tagcloud.java-regex=[~]<sub>TAGCLOUD</sub>[~]{replace-with}<p><ac:macro ac:name="listlabels" /></p>
-# Handle Table col and rowspans
-DokuWiki.96.table-rowandcolspans.class=com.atlassian.uwc.converters.dokuwiki.TableRowColSpanConverter
+# Optional: Handle Table col and rowspans
+# This will only work with Confluence 4+
+# Will handle some tables with col and rowspans, but may need additional dev,
+# depending on the complexity of your tables.
+#DokuWiki.96.table-rowandcolspans.class=com.atlassian.uwc.converters.dokuwiki.TableRowColSpanConverter
# Detokenize (html tags) - we do this to address cases where the users had
# Dokuwiki markup and html tagged markup. DokuWiki.92 tokenized the tags before
@@ -229,6 +258,11 @@
## Blogs
## Optional. Use the blog-namespaces property to identify the namespaces
## (:: delimited) that should be treated as blogs
+## Warning! Links to such pages will not be properly preserved.
+## If you are using the HierarchyLinkConverter, you can
+## grep for the following in the uwc.log to identify page links
+## that will need to be manually updated:
+## grep "Blog Link Report" uwc.log
#DokuWiki.999.blog.class=com.atlassian.uwc.converters.dokuwiki.BlogConverter
#DokuWiki.999.blog-namespaces.property=ns/blog::ns2/blog
diff --git a/lib/confluence-xmlrpc-wrapper-v5.1.0.jar b/lib/confluence-xmlrpc-wrapper-v5.1.0.jar
deleted file mode 100644
index d5f2729..0000000
--- a/lib/confluence-xmlrpc-wrapper-v5.1.0.jar
+++ /dev/null
Binary files differ
diff --git a/lib/confluence-xmlrpc-wrapper-v5.2.0.jar b/lib/confluence-xmlrpc-wrapper-v5.2.0.jar
new file mode 100644
index 0000000..9a4e378
--- /dev/null
+++ b/lib/confluence-xmlrpc-wrapper-v5.2.0.jar
Binary files differ
diff --git a/sampleData/engine/history/ancestors/foo/other.1234567778.txt b/sampleData/engine/history/ancestors/foo/other.1234567778.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/sampleData/engine/history/ancestors/foo/other.1234567778.txt
diff --git a/sampleData/engine/history/ancestors/foo/page.1234567890.txt b/sampleData/engine/history/ancestors/foo/page.1234567890.txt
new file mode 100644
index 0000000..0e1045d
--- /dev/null
+++ b/sampleData/engine/history/ancestors/foo/page.1234567890.txt
@@ -0,0 +1 @@
+Test 0
diff --git a/sampleData/engine/history/ancestors/foo/page.1234567891.txt b/sampleData/engine/history/ancestors/foo/page.1234567891.txt
new file mode 100644
index 0000000..ac5ab8f
--- /dev/null
+++ b/sampleData/engine/history/ancestors/foo/page.1234567891.txt
@@ -0,0 +1 @@
+Test 1
diff --git a/sampleData/engine/history/ancestors/foo/page.1234567892.txt b/sampleData/engine/history/ancestors/foo/page.1234567892.txt
new file mode 100644
index 0000000..110c58e
--- /dev/null
+++ b/sampleData/engine/history/ancestors/foo/page.1234567892.txt
@@ -0,0 +1 @@
+Test 2
diff --git a/sampleData/engine/history/ancestors/gzip/page.1234567890.txt.gz b/sampleData/engine/history/ancestors/gzip/page.1234567890.txt.gz
new file mode 100644
index 0000000..e205013
--- /dev/null
+++ b/sampleData/engine/history/ancestors/gzip/page.1234567890.txt.gz
Binary files differ
diff --git a/sampleData/engine/history/meta/foo/page.changes b/sampleData/engine/history/meta/foo/page.changes
new file mode 100644
index 0000000..b26c589
--- /dev/null
+++ b/sampleData/engine/history/meta/foo/page.changes
@@ -0,0 +1,4 @@
+1233490141 127.0.0.1 C start laurakolker created
+1234567892 127.0.0.1 E start authortest
+1263490629 127.0.0.1 E start notmod comment
+1263490649 127.0.0.1 E start notmod
diff --git a/sampleData/engine/history/pages/foo/page.txt b/sampleData/engine/history/pages/foo/page.txt
new file mode 100644
index 0000000..b25a4e0
--- /dev/null
+++ b/sampleData/engine/history/pages/foo/page.txt
@@ -0,0 +1 @@
+Test Current
diff --git a/sampleData/engine/history/pages/gzip/page.txt b/sampleData/engine/history/pages/gzip/page.txt
new file mode 100644
index 0000000..5762c89
--- /dev/null
+++ b/sampleData/engine/history/pages/gzip/page.txt
@@ -0,0 +1 @@
+Original
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/BlogConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/BlogConverter.java
index 711802a..970e435 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/BlogConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/BlogConverter.java
@@ -25,15 +25,20 @@
protected boolean namespaceIsBlog(String path) {
String nsString = getProperties().getProperty("blog-namespaces", null);
- if (nsString == null) return false;
- String[] namespaces = nsString.split("::");
+ return namespaceIsBlog(path, nsString);
+ }
+
+ public static boolean namespaceIsBlog(String path, String nsPropString) {
+ if (nsPropString == null) return false;
+ String[] namespaces = nsPropString.split("::");
for (String namespace : namespaces) {
if (sameNamespace(path, namespace)) return true;
}
return false;
}
+
- protected boolean sameNamespace(String path, String namespace) {
+ protected static boolean sameNamespace(String path, String namespace) {
while (!"".equals(path)) {
if (path.endsWith(namespace)) {
return true;
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/BlogConverterTest.java b/src/com/atlassian/uwc/converters/dokuwiki/BlogConverterTest.java
index 8ee7191..9cdca5d 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/BlogConverterTest.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/BlogConverterTest.java
@@ -31,25 +31,15 @@
assertTrue(page.isBlog());
}
- //DELETE
- public void testRealEx() throws IOException {
- tester.getProperties().setProperty("blog-namespaces", "blog/magritte");
- tester.getProperties().setProperty("space-magritte", "blog/magritte,blog/magritte_closures,develop/teams/teammagritte");
- File file = new File ("/Users/laura/Code/Clients/AppFusions/projects/TAMigration/dokuwikiexport/wiki/data" +
- "/pages/blog/magritte.txt");
- Page page = new Page(file);
- page.setOriginalText(FileUtils.readTextFile(file));
- tester.convert(page);
- assertTrue(page.isBlog());
- assertTrue(page.getConvertedText().contains(getExpected("5", "magritte")));
- }
public void testNamespaceIsBlog() {
File file = new File ("sampleData/dokuwiki/SampleDokuwiki-InputLists.txt");
+ assertTrue(tester.namespaceIsBlog("sampleData/dokuwiki"));
assertTrue(tester.namespaceIsBlog(file.getPath()));
file = new File ("sampleData/engine/README.txt");
assertFalse(tester.namespaceIsBlog(file.getPath()));
+
}
public void testConvertBlogMacro() {
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/DiscussionConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/DiscussionConverter.java
index 95be1af..9e43841 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/DiscussionConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/DiscussionConverter.java
@@ -15,11 +15,14 @@
import com.atlassian.uwc.converters.tikiwiki.RegexUtil;
import com.atlassian.uwc.ui.Comment;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class DiscussionConverter extends DokuwikiUserDate {
@Override
public void convert(Page page) {
+ if (page instanceof VersionPage) return;
+// log.debug("Discussion Conversion --- Start");
String input = page.getOriginalText();
if (hasDiscussion(input)) {
String commentData = getCommentData(page.getFile());
@@ -32,6 +35,7 @@
}
}
}
+ log.debug("Discussion Conversion --- Completed");
}
Pattern discussionP = Pattern.compile("~~DISCUSSION[^~]*~~");
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDate.java b/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDate.java
index 95ed00a..9544b6f 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDate.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDate.java
@@ -11,12 +11,13 @@
import com.atlassian.uwc.converters.BaseConverter;
import com.atlassian.uwc.ui.FileUtils;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class DokuwikiUserDate extends HierarchyTarget {
Logger log = Logger.getLogger(this.getClass());
public void convert(Page page) {
- String changeFilepath = createChangeFilename(page.getFile().getPath());
+ String changeFilepath = createChangeFilename(page);
if (changeFilepath == null) {
log.warn("Could not handle user and date data. Check filepath-hierarchy-ignorable-ancestors amd meta-dir settings. Skipping");
return;
@@ -36,22 +37,40 @@
return;
}
//not preserving history at this time
- String lastline = getLastLine(changeContent);
- ChangeData data = getData(lastline);
+ String line = "";
+ if (page instanceof VersionPage) {
+ line = getHistoryLine(changeContent, page.getTimestamp());
+ }
+ else
+ line = getLastLine(changeContent);
+ ChangeData data = getData(line);
if (data == null) {
log.warn("changes content was malformed in file: " + changeFilepath + ". Skipping.");
return;
}
- long timestring = Long.parseLong(data.timestamp);
- Date date = new Date(timestring*1000); //multiply x 1000 because the Date interface is in milliseconds
- page.setTimestamp(date);
+ if (!(page instanceof VersionPage)) { //VersionPage already has timestamp set
+ long timestring = Long.parseLong(data.timestamp);
+ Date date = new Date(timestring*1000); //multiply x 1000 because the Date interface is in milliseconds
+ log.debug("User Date Converter - setting timestamp: " + data.timestamp);
+ page.setTimestamp(date);
+ }
+ log.debug("User Date Converter - setting author: " + data.user);
page.setAuthor(data.user);
}
- private String createChangeFilename(String path) {
+ protected String getHistoryLine(String changeContent, Date timestamp) {
+ String epoch = (timestamp.getTime()/1000)+"";
+ Pattern p = Pattern.compile("(?<=^|\n)"+epoch+"[^\n]+");
+ Matcher m = p.matcher(changeContent);
+ if (m.find()) return m.group();
+ else log.debug("Could not get history line for timestamp: " + epoch);
+ return getLastLine(changeContent);
+ }
+
+ protected String createChangeFilename(Page page) {
+ String path = getRelativePath(page);
return getMetaFilename(path, ".changes");
}
-
Pattern lastline = Pattern.compile("[^\n]*$");
private String getLastLine(String input) {
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDateTest.java b/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDateTest.java
index a2c2576..3db6c4a 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDateTest.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/DokuwikiUserDateTest.java
@@ -9,6 +9,7 @@
import org.apache.log4j.PropertyConfigurator;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class DokuwikiUserDateTest extends TestCase {
@@ -35,7 +36,8 @@
Date date = page.getTimestamp();
assertNotNull(date);
- assertEquals(new Date(1263490649*1000), date);
+ Date exp = new Date(new Long(1263490649)*1000);
+ assertEquals(exp, date);
String author = page.getAuthor();
assertNotNull(author);
@@ -57,11 +59,96 @@
Date date = page.getTimestamp();
assertNotNull(date);
- assertEquals(new Date(1263490771*1000), date);
+ assertEquals(new Date(new Long(1263490771)*1000), date);
String author = page.getAuthor();
assertNull(author);
}
+ public void testVersionPage() {
+ tester.getProperties().setProperty("filepath-hierarchy-ignorable-ancestors", "sampleData/engine/history/pages");
+ tester.getProperties().setProperty("meta-dir", "sampleData/engine/history/meta");
+ String pagedir = "sampleData/engine/history/ancestors/foo/page.1234567892.txt";
+ Page page = new VersionPage(new File(pagedir));
+ page.setPath(page.getFile().getPath().replaceFirst("\\/[^\\/]*$", ""));
+ page.setName("page");
+ page.setTimestamp(new Date(new Long(1234567892)*1000)); //we set this in the engine for VersionPage
+ page.setParent(new Page(new File("sampleData/engine/history/pages/foo/page.txt")));
+
+ assertNotNull(page);
+ assertNull(page.getAuthor());
+
+ tester.convert(page);
+ assertNotNull(page);
+ assertNotNull(page.getAuthor());
+
+ String author = page.getAuthor();
+ assertNotNull(author);
+ assertEquals("authortest", author);
+ }
+
+
+ public void testCreateChangeFilename_Basic() {
+ String input, expected, actual;
+
+ String pagedir = "sampleData/dokuwiki/junit_resources/pages";
+ Page page = new Page(new File(pagedir+"/start.txt"));
+
+ expected = "sampleData/dokuwiki/junit_resources/meta/start.changes";
+ actual = tester.createChangeFilename(page);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
+
+ public void testCreateChangeFilename_WithNS() {
+ tester.getProperties().setProperty("filepath-hierarchy-ignorable-ancestors", "sampleData/engine/history/pages");
+ tester.getProperties().setProperty("meta-dir", "sampleData/engine/history/meta");
+ tester.getProperties().setProperty("page-history-load-as-ancestors-dir", "sampleData/engine/history/ancestors");
+
+ String input, expected, actual;
+ String pagepath = "sampleData/engine/history/pages/foo/page.txt";
+ Page page = new Page(new File(pagepath));
+
+ expected = "sampleData/engine/history/meta/foo/page.changes";
+ actual = tester.createChangeFilename(page);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
+
+ public void testCreateChangeFilename_Ancestor() {
+ tester.getProperties().setProperty("filepath-hierarchy-ignorable-ancestors", "sampleData/engine/history/pages");
+ tester.getProperties().setProperty("meta-dir", "sampleData/engine/history/meta");
+ tester.getProperties().setProperty("page-history-load-as-ancestors-dir", "sampleData/engine/history/ancestors");
+
+ String input, expected, actual;
+ String pagepath = "sampleData/engine/history/ancestors/foo/page.1234567892.txt";
+ Page page = new VersionPage(new File(pagepath));
+ page.setParent(new Page(new File("sampleData/engine/history/pages/foo/page.txt")));
+
+ expected = "sampleData/engine/history/meta/foo/page.changes";
+ actual = tester.createChangeFilename(page);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
+
+ public void testGetHistoryLine() {
+ String input, expected, actual;
+ input = "1233490141 127.0.0.1 C start laurakolker created\n" +
+ "1234567892 127.0.0.1 E start authortest\n" +
+ "1263490629 127.0.0.1 E start notmod comment\n" +
+ "1263490649 127.0.0.1 E start notmod\n" +
+ "";
+ Date timestamp = new Date(new Long(1234567892)*1000);
+ expected = "1234567892 127.0.0.1 E start authortest";
+ actual = tester.getHistoryLine(input, timestamp);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+
+ timestamp = new Date(new Long(1263490629)*1000);
+ expected = "1263490629 127.0.0.1 E start notmod comment";
+ actual = tester.getHistoryLine(input, timestamp);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
}
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverter.java
index c373933..51006c8 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverter.java
@@ -13,12 +13,19 @@
public class HierarchyLinkConverter extends HierarchyTarget {
+ private String origPageTitle;
Logger log = Logger.getLogger(this.getClass());
public void convert(Page page) {
+ log.debug("Link Conversion --- Start");
String input = page.getOriginalText();
- String converted = convertLink(input, getCurrentPath(page), getSpacekey(page), page.getFile().getPath());
+ origPageTitle = page.getName();
+ String current = getCurrentPath(page);
+ String spacekey = getSpacekey(page);
+ log.debug("current: '" + current + "' spacekey: '" + spacekey + "'");
+ String relativePath = getRelativePath(page);
+ String converted = convertLink(input, current, spacekey, relativePath);
page.setConvertedText(converted);
-
+ log.debug("Link Conversion --- Completed");
}
public String getSpacekey(Page page) {
@@ -43,6 +50,7 @@
while (linkFinder.find()) {
found = true;
String target = linkFinder.group(1);
+ log.debug("link target orig: " + target);
String alias = null;
if (target.startsWith("\\\\")) continue; //UNC link
if (target.contains("|")) {
@@ -50,6 +58,9 @@
target = parts[0];
alias = parts[1];
}
+ //FIXME anchors? we're not ready to transform these, so just get rid of the anchor part
+ if (target.contains("#")) target = target.replaceAll("#[^|]*", "");
+ log.debug("target: '" + target + "' and alias: '" + alias + "'");
//remove any opening colons (:namespace:page)
target = target.trim();
if (!isExternal(target)) {
@@ -57,9 +68,11 @@
target = target.replaceAll("^[.]*", "");
if (currentPath != null && !currentPath.equals(currentSpacekey)) { //need to add hierarchy in
String pre = currentPath.replaceAll("\\/", ":");
+// log.debug("pre = " +pre);
if (pre.endsWith(".txt")) {
pre = pre.replaceFirst("[.]txt$", "");
String sibling = target.replaceFirst(":[^:]+$", "");
+// log.debug("sibling = " +sibling);
if (pre.endsWith(sibling)) {
target = target.replaceFirst("^:[^:]+:", "");
}
@@ -76,8 +89,8 @@
}
if (allspaces.contains(targetPart1))
containsSpace = true;
- log.debug("--LinkConverter");
- log.debug("targetPart1 =" + targetPart1);
+// log.debug("--LinkConverter");
+// log.debug("targetPart1 =" + targetPart1);
//get rid of unnecessary links to start
//(start page content will be moved to parent in DokuwikiHierarchy
//unless the start page is a top level page in the space)
@@ -89,10 +102,11 @@
String hierarchy = target; //save for later
//is there a meta title to be used?
// log.debug("pagepath = " + pagepath);
- String metaFilename = getMetaFilename(pagepath, ".meta");
+ String origMetaFilename = getMetaFilename(pagepath, ".meta");
+ String metaFilename = origMetaFilename;
// log.debug("isOne = " + isOne + ", target = " + target + ", metaFilename = " + metaFilename);
metaFilename = getTargetMetaFilename(target, metaFilename, isOne);
- log.debug("metaFilename = " + metaFilename);
+// log.debug("metaFilename = " + metaFilename);
String metatitle = HierarchyTitleConverter.getMetaTitle(metaFilename);
log.debug("metatitle = " + metatitle);
//get confluence page name and fix the case to match HierarchyTitleConverter
@@ -104,9 +118,9 @@
//fix collisions
String linkSpacekey = currentSpacekey;
targetPart1 = targetPart1.replaceAll(":+", File.separator);
- log.debug("containsSpace: " + containsSpace + ", " +
- "ns: "+ namespaces.containsKey(targetPart1)
- +", tp1: '" + targetPart1+"'");
+// log.debug("containsSpace: " + containsSpace + ", " +
+// "ns: "+ namespaces.containsKey(targetPart1)
+// +", tp1: '" + targetPart1+"'");
if (!containsSpace && namespaces.containsKey(targetPart1)) {
linkSpacekey = namespaces.get(targetPart1);
log.debug("linkSpacekey = " + linkSpacekey);
@@ -118,6 +132,7 @@
//add spacekey to target if necessary
if (!target.contains(":") || containsSpace)
target = linkSpacekey + ":" + target;
+ blogLinkReport(namespaces, targetPart1, origMetaFilename, spacekey, target);
log.debug("link target = " + target);
}
//build complete link
@@ -136,6 +151,24 @@
}
+ private void blogLinkReport(HashMap<String, String> namespaces,
+ String ns, String pageMetaFilename,
+ String spacekey, String linktarget) {
+ if (namespaces.containsKey(ns)) {
+ String nsString = getProperties().getProperty("blog-namespaces", null);
+ if (BlogConverter.namespaceIsBlog(ns, nsString)) {
+ String pagetitle = HierarchyTitleConverter.getMetaTitle(pageMetaFilename);
+ if (pagetitle == null || "".equals(pagetitle))
+ pagetitle = origPageTitle;
+ pagetitle = HierarchyTitleConverter.fixTitle(pagetitle);
+ log.info("Blog Link Report - page: '" + pagetitle + "' " +
+ "in space: '" + spacekey + "' " +
+ "linking to: '" + linktarget + "'");
+ }
+ }
+
+ }
+
protected String getTargetMetaFilename(String target, String metaFilename, boolean isOne) {
target=target.replaceAll(":+", File.separator);
if (!target.startsWith(File.separator)) target = File.separator + target;
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverterTest.java b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverterTest.java
index f6289ef..b687921 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverterTest.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyLinkConverterTest.java
@@ -11,6 +11,7 @@
import com.atlassian.uwc.ui.FileUtils;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class HierarchyLinkConverterTest extends TestCase {
@@ -35,6 +36,11 @@
tester.getProperties().setProperty("filepath-hierarchy-ignorable-ancestors", HierarchyTitleConverterTest.PAGESDIR);
}
+
+ protected void tearDown() {
+ tester.getProperties().setProperty("page-history-load-as-ancestors-dir", "");
+ }
+
public void testConvertLink() {
String input, expected, actual;
input = "[[drink:start]]\n" +
@@ -151,6 +157,20 @@
assertEquals(expected, actual);
}
+
+ public void testConvertWithMetaTitle_Ancestor() throws IOException {
+ tester.getProperties().setProperty("page-history-load-as-ancestors-dir", "/Users/laura/Code/Git/uwc/sampleData/dokuwiki/junit_resources/attic/");
+ String input, expected, actual;
+ input = "[[.:foo]]\n" +
+ "[[:foo:bar]]\n";
+ expected = "[xyz:Foo Tralala]\n" +
+ "[xyz:Harumph BAr]\n";
+ tester.getProperties().setProperty("filepath-hierarchy-ignorable-ancestors", "/Users/laura/Code/Git/uwc/sampleData/dokuwiki/junit_resources/pages");
+ String pretendthispagepath = "/Users/laura/Code/Git/uwc/sampleData/dokuwiki/junit_resources/pages/test.txt";
+ actual = tester.convertLink(input, "", "xyz", pretendthispagepath);
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
public void testGetTargetMetaFilename() {
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTarget.java b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTarget.java
index 6b79789..dc911c6 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTarget.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTarget.java
@@ -12,6 +12,7 @@
import com.atlassian.uwc.converters.BaseConverter;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public abstract class HierarchyTarget extends BaseConverter {
Pattern space = Pattern.compile("space-([^-]*)");
@@ -79,16 +80,16 @@
String tmpMetaFilename = targetMetaFilename;
for (int i = parents.length-2;i>=0;i--) {
String parent = parents[i];
- log.debug("HT: parent = '" + parent + "', tmpMetaFilename:'" + tmpMetaFilename + "'");
+// log.debug("HT: parent = '" + parent + "', tmpMetaFilename:'" + tmpMetaFilename + "'");
if (parent.toLowerCase().equals(target.toLowerCase())) continue;
if ("".equals(parent)) continue;
if (tmpMetaFilename != null) {
Matcher metaFinder = metaFile.matcher(tmpMetaFilename);
if (metaFinder.find()) {
String parentMetaFilename = metaFinder.replaceFirst(".meta");
- log.debug("HT: parentMetaFilename: '" + parentMetaFilename + "'");
+// log.debug("HT: parentMetaFilename: '" + parentMetaFilename + "'");
String tmpparent = HierarchyTitleConverter.getMetaTitle(parentMetaFilename);
- log.debug("HT: tmpparent: '" + tmpparent + "'");
+// log.debug("HT: tmpparent: '" + tmpparent + "'");
if (tmpparent != null && !"".equals(tmpparent)) parent = tmpparent;
tmpMetaFilename = parentMetaFilename; //in case we have to go again
}
@@ -132,20 +133,23 @@
protected String getCurrentPath(Page page) {
String ignorable = getProperties().getProperty("filepath-hierarchy-ignorable-ancestors", "");
+ if (page instanceof VersionPage) {
+ ignorable = getProperties().getProperty("page-history-load-as-ancestors-dir", "");
+ }
String full = page.getPath();
if (full == null) return null;
return full.replaceAll("\\Q"+ignorable + "\\E", "");
}
protected String getMetaFilename(String path, String filetype) {
- String metadir = getProperties().getProperty("meta-dir", null);
- if (metadir == null) {
- return null;
- }
String ignorable = getProperties().getProperty("filepath-hierarchy-ignorable-ancestors", null);
if (ignorable == null) {
return null;
}
+ String metadir = getProperties().getProperty("meta-dir", null);
+ if (metadir == null) {
+ return null;
+ }
String relative = path.replaceFirst("\\Q" + ignorable + "\\E", "");
relative = relative.replaceFirst("\\.txt$", filetype);
if (relative.startsWith(File.separator) && metadir.endsWith(File.separator))
@@ -154,5 +158,13 @@
relative = File.separator + relative;
return metadir + relative;
}
+
+ public String getRelativePath(Page page) {
+ String path = page.getFile().getPath();
+ if (page instanceof VersionPage)
+ path = page.getParent().getFile().getPath();
+ log.debug("HierarchyTarget: relative path = " + path);
+ return path;
+ }
}
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTitleConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTitleConverter.java
index bbe0844..5be3327 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTitleConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/HierarchyTitleConverter.java
@@ -9,11 +9,15 @@
import com.atlassian.uwc.ui.FileUtils;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class HierarchyTitleConverter extends DokuwikiUserDate {
static Logger log = Logger.getLogger(HierarchyTitleConverter.class);
public void convert(Page page) {
+ if (page instanceof VersionPage) { //we'll address this in the engine
+ return;
+ }
String name = page.getName();
//check the metadata for a title
name = getMetaTitle(page);
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverter.java
index c081f4e..1d22fed 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverter.java
@@ -17,6 +17,10 @@
@Override
public void convert(Page page) {
String path = page.getFile().getPath();
+ if (page.getParent() != null && page.getParent().getSpacekey() != null) {
+ page.setSpacekey(page.getParent().getSpacekey());
+ return;
+ }
String ancestors = getProperties().getProperty("filepath-hierarchy-ignorable-ancestors", "");
if (path.startsWith(ancestors)) path = path.replaceFirst("\\Q"+ancestors+"\\E", "");
log.debug("Path after removing ancestors = " + path);
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverterTest.java b/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverterTest.java
index 41d5e6f..46b5ddf 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverterTest.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/SpaceConverterTest.java
@@ -9,6 +9,7 @@
import org.apache.log4j.PropertyConfigurator;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class SpaceConverterTest extends TestCase {
@@ -148,4 +149,21 @@
assertEquals(expkey, page.getSpacekey());
}
+ public void testConvert_VersionPage() {
+
+ tester.getProperties().setProperty("space-lala","sampleData/dokuwiki/SampleDokuwiki-InputLists");
+ String path = "sampleData/dokuwiki/SampleDokuwiki-InputLists.txt";
+ String expkey = "tralala";
+ File file = new File(path);
+ assertTrue(file.exists());
+ Page page = new VersionPage(file);
+ Page parent = new Page(new File("sampleData/dokuwiki/SampleDokuwiki-InputBasic.txt"));
+ parent.setSpacekey(expkey);
+ page.setParent(parent);
+ assertNull(page.getSpacekey());
+ tester.convert(page);
+ assertNotNull(page.getSpacekey());
+ assertEquals(expkey, page.getSpacekey());
+ }
+
}
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverter.java b/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverter.java
index ac2b7f5..a99dceb 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverter.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverter.java
@@ -4,17 +4,25 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
+import org.apache.log4j.Logger;
+
import com.atlassian.uwc.converters.BaseConverter;
import com.atlassian.uwc.converters.tikiwiki.RegexUtil;
import com.atlassian.uwc.ui.Page;
+import com.atlassian.uwc.ui.VersionPage;
public class TableRowColSpanConverter extends BaseConverter {
+ Logger log = Logger.getLogger(this.getClass());
@Override
public void convert(Page page) {
String input = page.getOriginalText();
- String converted = convertColspans(input);
- converted = convertRowspans(converted);
+ String tmpconverted = convertColspans(input);
+ if (!(page instanceof VersionPage) && !input.equals(tmpconverted))
+ log.debug("Colspans detected: " + page.getName());
+ String converted = convertRowspans(tmpconverted);
+ if (!(page instanceof VersionPage) && !tmpconverted.equals(converted))
+ log.debug("Rowspans detected: " + page.getName());
page.setConvertedText(converted);
}
@@ -57,6 +65,7 @@
Pattern table = Pattern.compile("<table>(.*?)</table>", Pattern.DOTALL);
Pattern rowspan = Pattern.compile(":::");
Pattern tablerow = Pattern.compile("<tr>(.*?)</tr>", Pattern.DOTALL);
+ Pattern tdWithColspan = Pattern.compile("<t([dh])(?: colspan='(\\d+)')?>(.*?)</t[dh]>", Pattern.DOTALL);
protected String convertRowspans(String input) {
Matcher tableFinder = table.matcher(input);
StringBuffer sb = new StringBuffer();
@@ -65,6 +74,7 @@
Vector<Integer> rowindeces = new Vector<Integer>();
Vector<Integer> colindeces = new Vector<Integer>();
Vector<Integer> rowvals = new Vector<Integer>();
+ Vector<Integer> colclear = new Vector<Integer>();
found = true;
String tableContents = tableFinder.group(1);
Matcher rowspanFinder = rowspan.matcher(tableContents);
@@ -78,40 +88,64 @@
StringBuffer rowsb = new StringBuffer();
boolean rowfound = false;
boolean noteindex = true;
+
+ boolean clearrow = false;
while (rowFinder.find()) {
int lastcol = -1;
rowfound = true;
boolean newrow = true;
String rowcontents = rowFinder.group(1);
- Matcher tdFinder = td.matcher(rowcontents);
+ Matcher tdFinder = tdWithColspan.matcher(rowcontents);
StringBuffer tdsb = new StringBuffer();
boolean tdfound = false;
boolean rowspanfoundLast = true;
boolean rowspanfoundCurrent = true;
+ int rowspancountThisRow = 0;
+ int currentColOffset = 0;
while (tdFinder.find()) {
tdfound = true;
lastcol++;
- String cell = tdFinder.group(2);
+ String cell = tdFinder.group(3);
+ String colspanOffset = tdFinder.group(2);
+
rowspanFinder = rowspan.matcher(cell);
rowspanfoundLast = rowspanfoundCurrent;
rowspanfoundCurrent = rowspanFinder.find();
- if (!rowspanfoundCurrent) continue;
+ if (!rowspanfoundCurrent) { //no rowspan
+ colclear.add(lastcol+currentColOffset);
+ }
+ else { //found a rowspan!
+ tdFinder.appendReplacement(tdsb, ""); //remove the ::: cells
+ if (newrow && clearrow && rowspanVal > 1) {
+ rowspancount++;
+ fillRowSpanVals(rowindeces, rowvals, rowspanVal);
+ rowspanVal = currentColOffset+1;
+ colindeces.add(lastcol); //note the index of the current cell
+ rowindeces.add(lastrow-1);//note the index of the previous row
+ noteindex = false;
+ }
+ rowspanVal++;
- tdFinder.appendReplacement(tdsb, ""); //remove the ::: cells
- rowspanVal++;
- if (noteindex || !newrow) {
- rowspancount++;
- colindeces.add(lastcol); //note the index of the current cell
- rowindeces.add(lastrow-1);//note the index of the previous row
- noteindex = false;
+ if (noteindex || !newrow) {
+ rowspancount++;
+ rowspancountThisRow++;
+ colindeces.add(lastcol); //note the index of the current cell
+ rowindeces.add(lastrow-1);//note the index of the previous row
+ noteindex = false;
+ if (!newrow && rowspancountThisRow > 0) rowspanVal--;
+ colclear.removeAllElements();
+ }
+ else if (!rowspanfoundLast) {
+ noteindex = true;
+ rowvals.add(rowspanVal);
+ rowspanVal=currentColOffset+1;
+ }
+
+ newrow = false;
}
- else if (!rowspanfoundLast) {
- noteindex = true;
- rowvals.add(rowspanVal);
- rowspanVal=1;
- }
- newrow = false;
+ if (colspanOffset != null)
+ currentColOffset += (Integer.parseInt(colspanOffset));
}
if (tdfound) {
tdFinder.appendTail(tdsb);
@@ -125,9 +159,16 @@
replacement = RegexUtil.handleEscapesInReplacement(replacement);
rowFinder.appendReplacement(rowsb, replacement);
lastrow++;
+ boolean tmpclear = true;
+ for (int i = 0; i < lastcol-1; i++) {
+ if (!colclear.contains(i+currentColOffset)) tmpclear = false;
+ }
+ clearrow = tmpclear;
+ colclear.removeAllElements();
}
if (rowfound) {
- rowvals.add(rowspanVal);
+ fillRowSpanVals(rowindeces, rowvals, rowspanVal);
+ rowspanVal=1;
rowFinder.appendTail(rowsb);
tableContents = rowsb.toString();
}
@@ -170,7 +211,7 @@
rowcontents = tdsb.toString();
}
}
-
+
String replacement = "<tr>"+rowcontents+"</tr>";
replacement = RegexUtil.handleEscapesInReplacement(replacement);
rowFinder.appendReplacement(rowsb, replacement);
@@ -191,6 +232,14 @@
}
return input;
}
-
+ public void fillRowSpanVals(Vector<Integer> indeces, Vector<Integer> vals, int val) {
+ vals.add(val);
+ int last = indeces.get(vals.size()-1);
+ while (indeces.size() > vals.size()) {
+ if (indeces.get(vals.size()-1) == last)
+ vals.add(val);
+ else break;
+ }
+ }
}
diff --git a/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverterTest.java b/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverterTest.java
index af5d599..e0e472a 100644
--- a/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverterTest.java
+++ b/src/com/atlassian/uwc/converters/dokuwiki/TableRowColSpanConverterTest.java
@@ -208,6 +208,7 @@
assertEquals(expected, actual);
}
+
public void testComplicated() { //multiple row and col spans?
String input, expected, actual;
input = "<table><tbody>\n" +
@@ -333,6 +334,142 @@
assertNotNull(actual);
assertEquals(expected, actual);
}
+
+ public void testSameAsComplicated_NoColspans() {
+ String input, expected, actual;
+ input = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> Heading 1 </p></th>\n" +
+ "<th><p> Heading 2 </p></th>\n" +
+ "<th><p> Heading 3 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 1 Col 1 </p></td>\n" +
+ "<td><p> Row 1 Col 2 </p></td>\n" +
+ "<td><p> Row 1 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 2 Col 1</p></td>\n" +
+ "<td><p> Row 2 Col 2</p></td>\n" +
+ "<td><p> Row 2 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 3 Col 1 </p></td>\n" +
+ "<td><p> Row 3 Col 2 </p></td>\n" +
+ "<td><p> Row 3 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 4 Col 1 </p></td>\n" +
+ "<td><p> this cell spans vertically </p></td>\n" +
+ "<td><p> Row 4 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 5 Col 1 </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> Row 5 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 6 Col 1 </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> Row 6 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 7 Col 1 </p></td>\n" +
+ "<td><p> Row 7 Col 2 </p></td>\n" +
+ "<td><p> Row 7 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 8 Col 1</p></td>\n" +
+ "<td><p> Row 8 Col 2</p></td>\n" +
+ "<td><p> Row 8 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 9 Col 1 </p></td>\n" +
+ "<td><p> Row 9 Col 2 </p></td>\n" +
+ "<td><p> Row 9 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> Row 10 Col 2</p></td>\n" +
+ "<td><p> Row 10 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> Row 11 Col 2</p></td>\n" +
+ "<td><p> Row 11 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "</tbody></table>\n" +
+ "";
+ expected = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> Heading 1 </p></th>\n" +
+ "<th><p> Heading 2 </p></th>\n" +
+ "<th><p> Heading 3 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 1 Col 1 </p></td>\n" +
+ "<td><p> Row 1 Col 2 </p></td>\n" +
+ "<td><p> Row 1 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 2 Col 1</p></td>\n" +
+ "<td><p> Row 2 Col 2</p></td>\n" +
+ "<td><p> Row 2 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 3 Col 1 </p></td>\n" +
+ "<td><p> Row 3 Col 2 </p></td>\n" +
+ "<td><p> Row 3 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 4 Col 1 </p></td>\n" +
+ "<td rowspan='3'><p> this cell spans vertically </p></td>\n" +
+ "<td><p> Row 4 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 5 Col 1 </p></td>\n" +
+ "\n" +
+ "<td><p> Row 5 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 6 Col 1 </p></td>\n" +
+ "\n" +
+ "<td><p> Row 6 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 7 Col 1 </p></td>\n" +
+ "<td><p> Row 7 Col 2 </p></td>\n" +
+ "<td><p> Row 7 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> Row 8 Col 1</p></td>\n" +
+ "<td><p> Row 8 Col 2</p></td>\n" +
+ "<td><p> Row 8 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td rowspan='3'><p> Row 9 Col 1 </p></td>\n" +
+ "<td><p> Row 9 Col 2 </p></td>\n" +
+ "<td><p> Row 9 Col 3 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> Row 10 Col 2</p></td>\n" +
+ "<td><p> Row 10 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> Row 11 Col 2</p></td>\n" +
+ "<td><p> Row 11 Col 3</p></td>\n" +
+ "</tr>\n" +
+ "</tbody></table>\n";
+
+ Page page =new Page(null);
+ page.setOriginalText(input);
+ tester.convert(page);
+ actual = page.getConvertedText();
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
public void testComplicated_SameRow() { //multiple row and col spans?
String input, expected, actual;
@@ -578,4 +715,187 @@
assertNotNull(actual);
assertEquals(expected, actual);
}
+
+ public void testMultTables_RowsProblem() {
+
+ String input, expected, actual;
+ input = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> h1 </p></th>\n" +
+ "<th><p> h2 </p></th>\n" +
+ "<th><p> h3 </p></th>\n" +
+ "<th><p> h4 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> foo </p></td>\n" +
+ "<td><p> bar </p></td>\n" +
+ "<td><p> baz </p></td>\n" +
+ "<td><p> tralala </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> meh </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> meep </p></td>\n" +
+ "</tr>\n" +
+ "</tbody></table>\n" +
+ "";
+ expected = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> h1 </p></th>\n" +
+ "<th><p> h2 </p></th>\n" +
+ "<th><p> h3 </p></th>\n" +
+ "<th><p> h4 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td rowspan='2'><p> foo </p></td>\n" +
+ "<td><p> bar </p></td>\n" +
+ "<td rowspan='2'><p> baz </p></td>\n" +
+ "<td><p> tralala </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> meh </p></td>\n" +
+ "\n" +
+ "<td><p> meep </p></td>\n" +
+ "</tr>\n" +
+ "</tbody></table>\n" +
+ "";
+ Page page = new Page(null);
+ page.setOriginalText(input);
+ tester.convert(page);
+ actual = page.getConvertedText();
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
+
+
+ public void testMultTables_RowsProblem2() {
+
+ String input, expected, actual;
+ input = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> h1 </p></th>\n" +
+ "<th><p> h2 </p></th>\n" +
+ "<th><p> h3 </p></th>\n" +
+ "<th><p> h4 </p></th>\n" +
+ "<th><p> h5 </p></th>\n" +
+ "<th><p> h6 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> thin </p></td>\n" +
+ "<td><p> man </p></td>\n" +
+ "<td><p> starring </p></td>\n" +
+ "<td><p> myrna </p></td>\n" +
+ "<td><p> loy </p></td>\n" +
+ "<td><p> tralalala </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> and </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> asta </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> foo </p></td>\n" +
+ "<td><p> bar </p></td>\n" +
+ "<td><p> arg</p></td>\n" +
+ "<td><p> this </p></td>\n" +
+ "<td><p> is </p></td>\n" +
+ "<td><p> annoying </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> testing </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> 123 </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> 1 </p></td>\n" +
+ "<td><p> 2 </p></td>\n" +
+ "<td><p> 3 </p></td>\n" +
+ "<td><p> 4 </p></td>\n" +
+ "<td><p> 5 </p></td>\n" +
+ "<td><p> 6 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> a </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> b </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "<td><p> ::: </p></td>\n" +
+ "</tr>\n" +
+ "</tbody></table>\n" +
+ "";
+ expected = "<table><tbody>\n" +
+ "<tr>\n" +
+ "<th><p> h1 </p></th>\n" +
+ "<th><p> h2 </p></th>\n" +
+ "<th><p> h3 </p></th>\n" +
+ "<th><p> h4 </p></th>\n" +
+ "<th><p> h5 </p></th>\n" +
+ "<th><p> h6 </p></th>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td rowspan='2'><p> thin </p></td>\n" +
+ "<td><p> man </p></td>\n" +
+ "<td rowspan='2'><p> starring </p></td>\n" +
+ "<td><p> myrna </p></td>\n" +
+ "<td rowspan='2'><p> loy </p></td>\n" +
+ "<td rowspan='2'><p> tralalala </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> and </p></td>\n" +
+ "\n" +
+ "<td><p> asta </p></td>\n" +
+ "\n" +
+ "\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td rowspan='2'><p> foo </p></td>\n" +
+ "<td><p> bar </p></td>\n" +
+ "<td rowspan='2'><p> arg</p></td>\n" +
+ "<td><p> this </p></td>\n" +
+ "<td rowspan='2'><p> is </p></td>\n" +
+ "<td rowspan='2'><p> annoying </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> testing </p></td>\n" +
+ "\n" +
+ "<td><p> 123 </p></td>\n" +
+ "\n" +
+ "\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "<td rowspan='2'><p> 1 </p></td>\n" +
+ "<td><p> 2 </p></td>\n" +
+ "<td rowspan='2'><p> 3 </p></td>\n" +
+ "<td><p> 4 </p></td>\n" +
+ "<td rowspan='2'><p> 5 </p></td>\n" +
+ "<td rowspan='2'><p> 6 </p></td>\n" +
+ "</tr>\n" +
+ "<tr>\n" +
+ "\n" +
+ "<td><p> a </p></td>\n" +
+ "\n" +
+ "<td><p> b </p></td>\n" +
+ "\n" +
+ "\n" +
+ "</tr>\n" +
+ "</tbody></table>\n";
+ Page page = new Page(null);
+ page.setOriginalText(input);
+ tester.convert(page);
+ actual = page.getConvertedText();
+ assertNotNull(actual);
+ assertEquals(expected, actual);
+ }
+
}
diff --git a/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchy.java b/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchy.java
index e13205b..512c1d1 100644
--- a/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchy.java
+++ b/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchy.java
@@ -139,17 +139,32 @@
else if (!parent.getPage().getSpacekey().equals(spacekey)) {
log.debug("...parent.getPage().getSpacekey: " + parent.getPage().getSpacekey() + "... and spacekey: " + spacekey);
log.debug("Copying branch to new parent because of spacekey: " + node.getName());
- HierarchyNode newparent = new HierarchyNode();
- newparent.setName(parent.getName());
- Page page = createPage(parent.getName());
- page.setSpacekey(spacekey);
- parent.removeChild(node);
- newparent.addChild(node);
- parent.getParent().addChild(newparent);
+
+ copyBranch(node, spacekey, parent);
+
}
return node;
}
+
+ public void copyBranch(HierarchyNode node, String spacekey,
+ HierarchyNode parent) {
+ HierarchyNode newparent = new HierarchyNode();
+ newparent.setName(parent.getName());
+ Page newparentpage = createPage(parent.getName());
+ newparentpage.setSpacekey(spacekey);
+ if (parent.getPage().getOriginalText() != null)
+ newparentpage.setOriginalText(parent.getPage().getOriginalText());
+ if (parent.getPage().getConvertedText() != null)
+ newparentpage.setConvertedText(parent.getPage().getConvertedText());
+ newparent.setPage(newparentpage);
+ parent.removeChild(node);
+ newparent.addChild(node);
+ parent.getParent().addChild(newparent);
+ if (parent.getParent().getName() == null) return;
+ copyBranch(newparent, spacekey, parent.getParent());
+ }
+
private void setTopNodeBranch(HierarchyNode root, Iterator topiter, HierarchyNode nexttopnode) {
Set<HierarchyNode> children = nexttopnode.getChildren();
topiter.remove(); //Only allowed way to remove from an iterator.
diff --git a/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchyTest.java b/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchyTest.java
index 346a901..d53f184 100644
--- a/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchyTest.java
+++ b/src/com/atlassian/uwc/hierarchies/DokuwikiHierarchyTest.java
@@ -263,6 +263,7 @@
nodes0.addAll(top);
String[] exp = {"Drink", "Food", "Food"};
testNodeResults(nodes0, exp);
+ testNodeHasSpace(nodes0);
//needs more than one level of parent to avoid collision
HierarchyNode drink1 = getNode("Drink", nodes0);
@@ -274,9 +275,18 @@
String[] exp2 = {"Juice", "Water"};
testNodeResults(fruitnodes1, exp2);
- HierarchyNode pie = getNode("Food", nodes0);
- Vector<HierarchyNode> tmpchildren = new Vector<HierarchyNode>(pie.getChildren());
- pie = getNode("Pie", tmpchildren);
+ HierarchyNode food1 = getNode("Food", nodes0, 0); //get the first one
+ HierarchyNode food2 = getNode("Food", nodes0, 1); //get the second one
+ Vector<HierarchyNode> tmpchildren = new Vector<HierarchyNode>(food1.getChildren());
+ HierarchyNode pie1 = getNode("Pie", tmpchildren);
+ HierarchyNode pie = pie1;
+ HierarchyNode food = food2;
+ if (pie1 == null) { //order is undefined so we have to check and switch if necessary
+ tmpchildren = new Vector<HierarchyNode>(food2.getChildren());
+ HierarchyNode pie2 = getNode("Pie", tmpchildren);
+ pie = pie2;
+ food = food1;
+ }
assertNotNull(pie);
assertEquals(3, pie.getChildren().size());
Vector<HierarchyNode> pienodes = new Vector<HierarchyNode>();
@@ -293,8 +303,7 @@
String[] expfruit = {"Fruit Apple"};
testNodeResults(fruitnodes2, expfruit);
- HierarchyNode food = getNode("Food", nodes0, 1); //get the second one
- assertNotNull(food);
+ assertNotNull(food); //use the food node that was selected earlier
assertEquals(2, food.getChildren().size());
Vector<HierarchyNode> foodnodes = new Vector<HierarchyNode>();
foodnodes.addAll(food.getChildren());
@@ -380,6 +389,14 @@
//
// }
+ private void testNodeHasSpace(Vector<HierarchyNode> nodes) {
+ for (HierarchyNode node : nodes) {
+ assertNotNull("node is null: " + node.getName(), node);
+ assertNotNull("node's page is null: " + node.getName(), node.getPage());
+ assertNotNull("node's spacekey is null: " + node.getName(), node.getPage().getSpacekey());
+ }
+ }
+
public void testBuildHierarchy_fixBranchNames() {
Properties props = tester.getProperties();
props.setProperty("spacekey", "food");
diff --git a/src/com/atlassian/uwc/ui/ConverterEngine.java b/src/com/atlassian/uwc/ui/ConverterEngine.java
index 2cad565..32a899d 100644
--- a/src/com/atlassian/uwc/ui/ConverterEngine.java
+++ b/src/com/atlassian/uwc/ui/ConverterEngine.java
@@ -12,6 +12,7 @@
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
+import java.util.Enumeration;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Hashtable;
@@ -91,6 +92,7 @@
private static final String ORPHAN_ATTACHMENTS_PAGE_TITLE="Orphan attachments";
private static final String DEFAULT_ATTACHMENT_UPLOAD_COMMENT = "Added by UWC, the Universal Wiki Converter";
public static final String PROPKEY_ENGINE_SAVES_TO_DISK = "engine-saves-to-disk";
+ private static final String PROPKEY_SPACEPERMS = "spaceperms";
/* START FIELDS */
public boolean running = false; //Methods check this to see if the conversion needs to be cancelled
@@ -875,8 +877,20 @@
* </ul>
*/
protected Page preserveHistory(Page page, String filename) {
- //get suffix
- String suffix = getPageHistorySuffix();
+ if (loadOnAncestors()) {
+ addAncestors(page);
+ if (!page.getAncestors().isEmpty()) {
+ page.setVersion(page.getLatestVersion()+1);
+ log.debug("Current page version: " + page.getVersion());
+ }
+ return page;
+ }
+ return identifyHistoryOnePage(page, filename);
+ }
+
+ public Page identifyHistoryOnePage(Page page, String filename) {
+ //get suffix
+ String suffix = getPageHistorySuffix();
if (suffix == null) {
log.error("Error attempting to preserve history: Page history suffix is Null.");
return page;
@@ -898,12 +912,69 @@
if (suffixReplacer.find()) {
String pagename = suffixReplacer.group(1);
String versionString = suffixReplacer.group(2);
- int version = Integer.parseInt(versionString);
page.setName(pagename); //set name before version so latestversion data is properly set in Page
- page.setVersion(version);
+ if (Boolean.parseBoolean(this.miscProperties.getProperty("page-history-sortwithtimestamp", "false")))
+ page.setTimestamp(new Date(Long.parseLong(versionString)*1000));
+ else
+ page.setVersion(Integer.parseInt(versionString));
}
return page;
}
+
+ /* Page History Load on Ancestors methods - START */
+ private boolean loadOnAncestors() {
+ return Boolean.parseBoolean(this.miscProperties.getProperty("page-history-load-as-ancestors", "false"));
+ }
+
+ private void addAncestors(Page page) {
+ String ancestorDir = this.miscProperties.getProperty("page-history-load-as-ancestors-dir", null);
+ if (ancestorDir == null) {
+ log.warn("page-history-load-as-ancestors-dir must be set. Cannot add ancestors.");
+ return;
+ }
+ String relPath = getPageRelativePath(page);
+ if (!ancestorDir.endsWith(File.separator) && !relPath.startsWith(File.separator))
+ ancestorDir += File.separator;
+ String ancestorPath = ancestorDir + relPath;
+ File dir = new File(ancestorPath);
+ File[] allFiles = dir.listFiles();
+ for (File file : allFiles) {
+ String filename = file.getName();
+ Page newPage = new VersionPage(file);
+ newPage.setParent(page);
+ newPage = identifyHistoryOnePage(newPage, filename);
+ if (newPage.getName() == null) continue;
+ if (newPage.getName().equalsIgnoreCase(page.getName().replaceFirst("[.][^.]+$", ""))) {
+ newPage.setName(page.getName()); //we need them to have the same name for latestversion to work
+ log.debug("Found ancestor page: " + newPage.getFile().getPath());
+ newPage.setPath(getPath(newPage.getFile().getPath()));
+ page.addAncestor((VersionPage) newPage);
+ }
+ }
+ if (!page.getAncestors().isEmpty() &&
+ Boolean.parseBoolean(this.miscProperties.getProperty("page-history-sortwithtimestamp", "false"))) {
+ if (Boolean.parseBoolean(this.miscProperties.getProperty("page-history-load-as-ancestors-lastiscurrent", "false"))) {
+ page.getAncestors().remove(page.getAncestors().lastElement());//remove the last ancestor if it's the same as current
+ }
+ for (int i = 1; i < page.getAncestors().size(); i++) {
+ VersionPage version = page.getAncestors().get(i);
+ version.setVersion(version.getLatestVersion()+1);
+// log.debug("version latest version: " + version.getLatestVersion() + " (" + version.getName()+ " )");
+ }
+// log.debug("page latest version: " + page.getLatestVersion() + " (" + page.getName()+ " )");
+ page.setSortWithTimestamp(true); //affects sorting of collections of pages (including hierarchies)
+ }
+
+ }
+
+ protected String getPageRelativePath(Page page) {
+ String ignorable = this.miscProperties.getProperty("filepath-hierarchy-ignorable-ancestors", "");
+ String full = page.getPath();
+ if (full == null) return null;
+ return full.replaceAll("\\Q"+ignorable + "\\E", "");
+ }
+ /* Page History Load on Ancestors methods - END */
+
/**
* gets the pagename given the pagepath
@@ -928,7 +999,7 @@
* @param converters
* @return true if conversion of all pages succeeded
*/
- protected boolean convertPages(List<Page> pages, List<Converter> converters) {
+ protected boolean convertPages(List pages, List<Converter> converters) {
return convertPages(pages, converters, "Converting pages...");
}
@@ -976,13 +1047,16 @@
this.feedback = Feedback.CANCELLED;
return false;
}
+ if (page.getAncestors() != null && !page.getAncestors().isEmpty()) {
+ convertPages(page.getAncestors(), converters);
+ }
}
//still more bookkeeping
conversionBookkeepingEndAll(pages, converters);
return result;
}
-
+
/**
* make some log entries about the time it took to convert a page
* @param startTimeStamp
@@ -1049,19 +1123,10 @@
else if (page.getOriginalText() == null){
try {
String pageContents = "";
- if (changingEncoding()) {
- String encoding = getEncoding();
- byte[] pagebytes = FileUtils.getBytesFromFile(file);
- try {
- pageContents = new String(pagebytes, encoding);
- } catch (UnsupportedEncodingException e) {
- String baseerror = "Could not encode file with encoding: " + encoding + ".";
- log.error(baseerror + " Using utf-8.");
- this.errors.addError(Feedback.BAD_SETTING, baseerror, true);
- pageContents = new String(pagebytes, "utf-8");
- }
+ if (isGzip() && page instanceof VersionPage) {
+ pageContents = getGzipText(file);
}
- else pageContents = FileUtils.readTextFile(file);
+ else pageContents = getAsciiText(file);
page.setOriginalText(pageContents);
} catch (IOException e) {
String message = "Could not read file " + file.getAbsolutePath() + ".\n" +
@@ -1077,6 +1142,36 @@
return file;
}
+ private boolean isGzip() {
+ return Boolean.parseBoolean(this.miscProperties.getProperty("page-history-load-as-ancestors-isgzip", "false"));
+ }
+
+ private String getGzipText(File file) throws IOException {
+ if (changingEncoding()) {
+ log.error("Changing Encoding from Gzip file is not supported yet! Can't change encoding");
+ }
+ return FileUtils.readGzipFile(file);
+ }
+
+ public String getAsciiText(File file) throws IOException,
+ UnsupportedEncodingException {
+ String pageContents;
+ if (changingEncoding()) {
+ String encoding = getEncoding();
+ byte[] pagebytes = FileUtils.getBytesFromFile(file);
+ try {
+ pageContents = new String(pagebytes, encoding);
+ } catch (UnsupportedEncodingException e) {
+ String baseerror = "Could not encode file with encoding: " + encoding + ".";
+ log.error(baseerror + " Using utf-8.");
+ this.errors.addError(Feedback.BAD_SETTING, baseerror, true);
+ pageContents = new String(pagebytes, "utf-8");
+ }
+ }
+ else pageContents = FileUtils.readTextFile(file);
+ return pageContents;
+ }
+
private boolean changingEncoding() {
if (this.miscProperties != null)
return this.miscProperties.containsKey("encoding");
@@ -1244,14 +1339,15 @@
* @param pages The pages to output.
* @param spacekey space to which the pages will be written
*/
- protected void writePages(List<Page> pages, String spacekey) {
+ protected void writePages(List pages, String spacekey) {
String note = "Uploading Pages to Confluence...";
this.state.updateNote(note);
log.info(note);
int numUploaded = 0;
+ List<Page> casted = (List<Page>) pages;
// at last, write the pages to Confluence!
- for (Page page : pages) {
+ for (Page page : casted) {
this.state.updateProgress();
if (!this.running) {
this.feedback = Feedback.CANCELLED;
@@ -1587,6 +1683,7 @@
* @throws IllegalArgumentException if a confluenceSetting is invalid
*/
protected String sendPage(Page page, String parentId, UWCUserSettings settings) {
+ //write current page
//XXX why are we setting these up every page. Most of these are global.
//XXX If we set these up earlier in the process, we could do the checkConfluenceSettings call
//(currently in the next sendPage) earlier in the process as well
@@ -1622,6 +1719,44 @@
return sendPage(page, parentId, confSettings);
}
+ Pattern spacepermPattern = Pattern.compile("[{]groupname[}](.*?)[{]permissions[}](.*)");
+ private void updateSpacePermissions(ConfluenceServerSettings confSettings) {
+ if (!this.miscProperties.containsKey(PROPKEY_SPACEPERMS)) return;
+
+ String allperms = null;
+ String groupname = null;
+ Vector<String> perms = new Vector<String>();
+ String spaceperms = this.miscProperties.getProperty(PROPKEY_SPACEPERMS);
+ boolean addgroup = Boolean.parseBoolean(this.miscProperties.getProperty("spaceperms-addgroup", "true"));
+ Matcher permsFinder = spacepermPattern.matcher(spaceperms);
+ if (permsFinder.find()) {
+ groupname = permsFinder.group(1);
+ allperms = permsFinder.group(2);
+ String[] permsArray = allperms.split(",");
+ for (String perm : permsArray) {
+ perms.add(perm);
+ }
+ }
+ if (groupname != null && !perms.isEmpty()) {
+ RemoteWikiBroker broker = RemoteWikiBroker.getInstance();
+ try {
+ if (addgroup && !broker.hasGroup(confSettings, groupname)) {
+ log.info("Adding group: " + groupname);
+ broker.addGroup(confSettings, groupname);
+ }
+ log.debug("Updating permissions...");
+ broker.addPermissionsToSpace(confSettings, perms, groupname);
+ log.info("Updated permissions for group: " + groupname + " in space " + confSettings.getSpaceKey());
+ } catch (Exception e) {
+ String message = "Could not update permissions ('"+allperms+"') for groupname: '" + groupname +"'";
+ getErrors().addError(Feedback.REMOTE_API_ERROR,
+ message,
+ true);
+ log.error(message,e);
+ }
+ }
+ }
+
public ConfluenceServerSettings getConfluenceServerSettings(
UWCUserSettings settings) {
ConfluenceServerSettings confSettings = new ConfluenceServerSettings();
@@ -1680,6 +1815,8 @@
Vector newspacepages = broker.getAllServerPageSummaries(confSettings, space.getSpaceKey());
PageForXmlRpc newhome = (PageForXmlRpc) newspacepages.get(0); //should only be one at this point
this.homepages.put(space.getSpaceKey(), newhome.getId());
+ //check to see if we're setting any permissions
+ updateSpacePermissions(confSettings);
} catch (Exception e) {
getErrors().addError(Feedback.BAD_LOGIN,
"Could not create space: " + spacekey +
@@ -1775,6 +1912,7 @@
}
//move the page if necessary and you can
+ log.debug("Identifying parent location for page...");
String parentid = null;
if (pageTable.containsKey("parentId"))
parentid = (String) pageTable.get("parentId");
@@ -1785,6 +1923,7 @@
}
else { //can we find the home page for this space?
try {
+ log.debug("Identifying Homepage for spacekey: " + confSettings.spaceKey);
SpaceForXmlRpc space = broker.getSpace(confSettings, confSettings.spaceKey);
parentid = space.getSpaceParams().get("homePage");
this.homepages.put(confSettings.spaceKey, parentid);
@@ -1796,6 +1935,7 @@
}
if (parentid != null) {
+ log.debug("Attempting to set parent to: " + parentid);
try {
broker.movePage(confSettings, newPage.getId(), parentid, RemoteWikiBroker.Position.APPEND);
} catch (Exception e) {
@@ -1874,6 +2014,10 @@
Hashtable pageTable = createPageTable(page, parentId);
//check for problems with settings
checkConfluenceSettings(confSettings); //XXX Why are we doing this for every page? 'cause we seem to create the confSettings on a page by page basis?
+ //write ancestors, if any, first
+ if (page.getAncestors() != null && !page.getAncestors().isEmpty()) {
+ pageTable = handleAncestors(page, confSettings, pageTable);
+ }
//send page
String id = null;
if (page.isBlog()) {
@@ -1891,14 +2035,46 @@
//send comments
sendComments(page, broker, id, confSettings);
//set author
+ log.debug("Page Version: " + page.getVersion());
sendAuthor(page, broker, id, confSettings);
//set timestamp
sendTimestamp(page, broker, id, confSettings);
//return the page id
return id;
}
+
+ private Hashtable handleAncestors(Page page,
+ ConfluenceServerSettings confSettings, Hashtable pageTable) {
+ enforceAncestorTitleAndKey(page.getAncestors(), page.getName(), page.getSpacekey(), page.isBlog());
+ if (page.isBlog()) { //get the blog id to make certain all ancestors and current page are made the same CEO
+ Page first = page.getAncestors().remove(0);
+ String blogid = sendPage(first, null, confSettings);
+ enforceBlogId(page, page.getAncestors(), blogid);
+ pageTable.put("id", blogid);
+ }
+ writePages(page.getAncestors(), settings.getSpace());
+ return pageTable;
+ }
- public String markupToXhtml(String markup) {
+ private void enforceBlogId(Page page, Vector<VersionPage> pages,
+ String blogid) {
+ page.setId(blogid);
+ for (VersionPage anc : pages) {
+ anc.setId(blogid);
+ }
+ }
+
+ private void enforceAncestorTitleAndKey(Vector<VersionPage> pages,
+ String name, String spacekey, boolean isBlog) {
+ for (VersionPage page : pages) {
+ page.setName(name);
+ page.setSpacekey(spacekey);
+ page.setIsBlog(isBlog);
+ }
+
+ }
+
+ public String markupToXhtml(String markup) {
RemoteWikiBroker broker = RemoteWikiBroker.getInstance();
ConfluenceServerSettings confSettings = getConfluenceServerSettings(this.settings);
try {
@@ -1937,6 +2113,7 @@
table.put("title", page.getName());
if (parentId != null && !parentId.equals("null")) table.put("parentId", parentId);
if (page.getVersion() > 0) table.put("version", page.getVersion() + "");
+ if (page.isBlog() && page.getId() != null) table.put("id", page.getId());
return table;
}
@@ -2042,7 +2219,7 @@
this.errors.addError(Feedback.REMOTE_API_ERROR, errorMessage, true);
}
}
- else log.debug("Page has no comments."); //DELETE
+// else log.debug("Page has no comments."); //DELETE
}
public String getContentAsXhtmlFormat(RemoteWikiBroker broker, ConfluenceServerSettings confSettings, String text) throws XmlRpcException, IOException {
@@ -2071,7 +2248,7 @@
private void sendTimestamp(Page page, RemoteWikiBroker broker, String id, ConfluenceServerSettings confSettings) {
if (page.getTimestamp() != null) {
- log.debug("Sending timestamp data.");
+ log.debug("Sending timestamp data: " + page.getTimestamp());
try {
DateFormat dateFormat = new SimpleDateFormat("yyyy:MM:dd:HH:mm:ss:SS"); //XXX Settable?
if (this.miscProperties.getProperty("user-timezone", null) != null) {
diff --git a/src/com/atlassian/uwc/ui/ConverterEngineTest.java b/src/com/atlassian/uwc/ui/ConverterEngineTest.java
index f5904b7..f58d6f2 100644
--- a/src/com/atlassian/uwc/ui/ConverterEngineTest.java
+++ b/src/com/atlassian/uwc/ui/ConverterEngineTest.java
@@ -2917,6 +2917,176 @@
}
	/**
	 * Tests the page-history "load as ancestors" framework.
	 * Without the load-as-ancestors properties, preserveHistory leaves the page
	 * with no ancestors and version 1. With them, prior versions found in the
	 * parallel ancestors directory are attached as VersionPage ancestors
	 * (versions 1-3) and the current page becomes version 4.
	 * Depends on fixture files under sampleData/engine/history/.
	 */
	public void testPreserveHistories_LoadAsAncestors() throws IOException {
		//create stub page
		String currentPageFilename = "page.txt";
		String path = "sampleData/engine/history/pages/foo/" + currentPageFilename;
		File file1 = new File(path);
		Page page1 = new Page(file1);
		page1.setName("page");
		String content = FileUtils.readTextFile(file1);
		page1.setOriginalText(content);
		page1.setConvertedText(content);
		String pagePath = file1.getPath();
		if (pagePath.lastIndexOf(File.separator) >= 0)
			pagePath = pagePath.substring(0, pagePath.lastIndexOf(File.separator));
		else pagePath = "";
		page1.setPath(pagePath);

		//path to directories with same file structure
		String ignorable = "sampleData/engine/history/pages/";
		String ancestorpath = "sampleData/engine/history/ancestors/";

		//test what happens when we pass in the page without the load-as-ancestors properties
		Page test1 = tester.preserveHistory(page1, currentPageFilename);
		assertNotNull(test1.getAncestors());
		assertTrue(test1.getAncestors().isEmpty());
		assertEquals(1, test1.getVersion());

		//set up the properties
		tester.handlePageHistoryProperty("wiki.switch.page-history-preservation", "true");
		tester.handlePageHistoryProperty("wiki.suffix.page-history-preservation", "[.]#[.]txt");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors.property", "true");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors-dir.property", ancestorpath);
		tester.handleMiscellaneousProperties("wiki.0000.filepath-hierarchy-ignorable-ancestors.property", ignorable);
		tester.handleMiscellaneousProperties("wiki.0000.page-history-sortwithtimestamp.property", "true");

		//now three ancestor versions should load and the current page's version is bumped past them
		Page actual = tester.preserveHistory(page1, currentPageFilename);
		assertNotNull(actual.getAncestors());
		assertFalse(actual.getAncestors().isEmpty());
		assertEquals(3, actual.getAncestors().size());
		assertEquals(4, actual.getVersion());
		assertEquals("page", actual.getName());
		assertEquals("Test Current\n", actual.getOriginalText()); //current page content is untouched

		//each ancestor carries the same page name, sequential versions, and its own history file
		Vector<VersionPage> ancestors = actual.getAncestors();
		Page anc0 = ancestors.get(0);
		assertNotNull(anc0);
		assertEquals("page", anc0.getName());
		assertEquals(1, anc0.getVersion());
		assertTrue(anc0.getFile().getName().endsWith("0.txt"));

		Page anc1 = ancestors.get(1);
		assertNotNull(anc1);
		assertEquals("page", anc1.getName());
		assertEquals(2, anc1.getVersion());
		assertTrue(anc1.getFile().getName().endsWith("1.txt"));

		Page anc2 = ancestors.get(2);
		assertNotNull(anc2);
		assertEquals("page", anc2.getName());
		assertEquals(3, anc2.getVersion());
		assertTrue(anc2.getFile().getName().endsWith("2.txt"));
	}
+
+
	/**
	 * Tests that convertPages runs the converters over loaded ancestor versions
	 * as well as the current page: after a Test->Testing regex converter, both
	 * the current page's text and each ancestor's text reflect the replacement.
	 * Depends on fixture files under sampleData/engine/history/.
	 */
	public void testPreserveHistories_LoadAsAncestorsConvertPage() throws IOException {
		//create stub page
		String currentPageFilename = "page.txt";
		String path = "sampleData/engine/history/pages/foo/" + currentPageFilename;
		File file1 = new File(path);
		Page page1 = new Page(file1);
		page1.setName("page");
		String content = FileUtils.readTextFile(file1);
		page1.setOriginalText(content);
		page1.setConvertedText(content);
		String pagePath = file1.getPath();
		if (pagePath.lastIndexOf(File.separator) >= 0)
			pagePath = pagePath.substring(0, pagePath.lastIndexOf(File.separator));
		else pagePath = "";
		page1.setPath(pagePath);
		//a single regex converter that rewrites "Test" to "Testing"
		List<String> input = new ArrayList<String>();
		input.add("Test.0001.test.java-regex=Test{replace-with}Testing");
		List<Converter> converters = tester.createConverters(input);

		//path to directories with same file structure
		String ignorable = "sampleData/engine/history/pages/";
		String ancestorpath = "sampleData/engine/history/ancestors/";

		//set up the properties
		tester.handlePageHistoryProperty("wiki.switch.page-history-preservation", "true");
		tester.handlePageHistoryProperty("wiki.suffix.page-history-preservation", "[.]#[.]txt");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors.property", "true");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors-dir.property", ancestorpath);
		tester.handleMiscellaneousProperties("wiki.0000.filepath-hierarchy-ignorable-ancestors.property", ignorable);
		tester.handleMiscellaneousProperties("wiki.0000.page-history-sortwithtimestamp.property", "true");
		tester.preserveHistory(page1, currentPageFilename);

		List<Page> pages = new Vector<Page>();
		pages.add(page1);

		//converting the page should also convert each loaded ancestor version
		assertTrue(tester.convertPages(pages, converters));
		Page actual = pages.get(0);

		assertNotNull(actual.getAncestors());
		assertFalse(actual.getAncestors().isEmpty());
		assertEquals("Testing Current\n", actual.getOriginalText());

		Vector<VersionPage> ancestors = (Vector<VersionPage>) actual.getAncestors();
		Page anc0 = ancestors.get(0);
		assertNotNull(anc0);
		assertEquals("Testing 0\n", anc0.getOriginalText());

		Page anc1 = ancestors.get(1);
		assertNotNull(anc1);
		assertEquals("Testing 1\n", anc1.getOriginalText());

		Page anc2 = ancestors.get(2);
		assertNotNull(anc2);
		assertEquals("Testing 2\n", anc2.getOriginalText());
	}
+
	/**
	 * Tests the page-history-load-as-ancestors-isgzip property: when set,
	 * ancestor files are treated as gzip-compressed, and their decompressed
	 * content ("Tralalala\n" in the fixture) is loaded as the ancestor text.
	 * Depends on fixture files under sampleData/engine/history/pages/gzip/.
	 */
	public void testPreserveHistories_LoadAsAncestorsGZipProp() throws IOException {
		//create stub page
		String currentPageFilename = "page.txt";
		String path = "sampleData/engine/history/pages/gzip/" + currentPageFilename;
		File file1 = new File(path);
		Page page1 = new Page(file1);
		page1.setName("page");
		String content = FileUtils.readTextFile(file1);
		page1.setOriginalText(content);
		page1.setConvertedText(content);
		String pagePath = file1.getPath();
		if (pagePath.lastIndexOf(File.separator) >= 0)
			pagePath = pagePath.substring(0, pagePath.lastIndexOf(File.separator));
		else pagePath = "";
		page1.setPath(pagePath);
		//converter should not affect this fixture's content; it guards against accidental rewrites
		List<String> input = new ArrayList<String>();
		input.add("Test.0001.test.java-regex=Test{replace-with}Testing");
		List<Converter> converters = tester.createConverters(input);

		//path to directories with same file structure
		String ignorable = "sampleData/engine/history/pages/";
		String ancestorpath = "sampleData/engine/history/ancestors/";

		//set up the properties, including the isgzip switch for ancestor files
		tester.handlePageHistoryProperty("wiki.switch.page-history-preservation", "true");
		tester.handlePageHistoryProperty("wiki.suffix.page-history-preservation", "[.]#[.]txt");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors.property", "true");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors-dir.property", ancestorpath);
		tester.handleMiscellaneousProperties("wiki.0000.filepath-hierarchy-ignorable-ancestors.property", ignorable);
		tester.handleMiscellaneousProperties("wiki.0000.page-history-load-as-ancestors-isgzip.property", "true");
		tester.handleMiscellaneousProperties("wiki.0000.page-history-sortwithtimestamp.property", "true");
		tester.preserveHistory(page1, currentPageFilename);
		assertFalse(page1.getAncestors().isEmpty());

		List<Page> pages = new Vector<Page>();
		pages.add(page1);

		assertTrue(tester.convertPages(pages, converters));
		Page actual = pages.get(0);
		assertEquals("Original\n", actual.getOriginalText()); //current page is plain text, unchanged

		assertNotNull(actual.getAncestors());
		assertFalse(actual.getAncestors().isEmpty());

		//ancestor content comes from the decompressed gzip file
		Vector<VersionPage> ancestors = actual.getAncestors();
		Page anc0 = ancestors.get(0);
		assertNotNull(anc0);
		assertEquals("Tralalala\n", anc0.getOriginalText());

	}
+
public void testIsHierarchy() {
String input = "Mywiki.0001.switch.hierarchy-builder=UseBuilder";
String[] inputs = input.split("=");
@@ -3629,6 +3799,39 @@
}
	/**
	 * Tests space creation with the spaceperms group-permission properties:
	 * sending a page to a nonexistent space should create the space, and the
	 * spaceperms-foo-users property should apply the listed permissions to it.
	 * NOTE(review): the permission application itself can only be verified
	 * manually (see TODO below); this test only asserts the space exists.
	 * NOTE(review): sendPage uses the basesettings field while getSpace uses a
	 * locally-loaded settings object — confirm both point at the same server.
	 */
	public void testCreateSpace_permissions() throws XmlRpcException, IOException {
		Page page = new Page(null);
		page.setName("Test1");
		page.setOriginalText("123");
		page.setConvertedText("234");
		String spacekey = "foo";
		page.setSpacekey(spacekey);
		RemoteWikiBroker broker = RemoteWikiBroker.getInstance();
		ConfluenceServerSettings settings = new ConfluenceServerSettings();
		String testpropslocation = "test.basic.properties";
		loadSettingsFromFile(settings, testpropslocation);
		//precondition: the space must not already exist; getSpace may throw instead
		//of returning null, so the exception is deliberately ignored here
		try {
			SpaceForXmlRpc space = broker.getSpace(settings, spacekey);
			assertNull(space);
		} catch (Exception e) { }

		//comma-delimited permission list, per the Space Permissions docs referenced
		//in conf/converter.dokuwiki.properties
		tester.handleMiscellaneousProperties("test.001.spaceperms-foo-users.property",
				"VIEWSPACE,EDITSPACE,REMOVEPAGE,EDITBLOG,COMMENT,REMOVECOMMENT,CREATEATTACHMENT,EXPORTSPACE,SETSPACEPERMISSIONS");

		tester.sendPage(page, null, basesettings);
		//TODO: We don't have a way to test this other than manually, so to test:
		//put a breakpoint here and examine the space and its permissions
		try {
			SpaceForXmlRpc space = broker.getSpace(settings, spacekey);
			assertNotNull(space);
		} catch (Exception e1) {
			e1.printStackTrace();
		} finally {
			deleteSpace(spacekey, settings); //cleanup
		}

	}
+
private void deleteSpace(String space, ConfluenceServerSettings confSettings) throws XmlRpcException, IOException {
confSettings.url = confSettings.url.replaceFirst("https?://", "");
diff --git a/src/com/atlassian/uwc/ui/FileUtils.java b/src/com/atlassian/uwc/ui/FileUtils.java
index eb10c84..7351ca0 100644
--- a/src/com/atlassian/uwc/ui/FileUtils.java
+++ b/src/com/atlassian/uwc/ui/FileUtils.java
@@ -4,6 +4,7 @@
import java.io.*;
import java.nio.charset.Charset;
+import java.util.zip.GZIPInputStream;
/**
* A couple of random file functions that are used by the engine.
@@ -64,6 +65,22 @@
return readTextFile(inputFile, charset);
}
+ public static String readGzipFile(File file) throws IOException {
+ FileInputStream fis = new FileInputStream(file);
+ GZIPInputStream gis = new GZIPInputStream(fis);
+ InputStreamReader isr = new InputStreamReader(gis);
+ BufferedReader reader = new BufferedReader(isr);
+
+ StringBuffer contents = new StringBuffer();
+ String line;
+ String separator = System.getProperty("line.separator");
+ while (( line = reader.readLine()) != null){
+ contents.append(line).append(separator);
+ }
+ fis.close();
+ isr.close();
+ return contents.toString();
+ }
/**
* Creates or truncates a file and then writes a string to it.
diff --git a/src/com/atlassian/uwc/ui/Page.java b/src/com/atlassian/uwc/ui/Page.java
index c1b8710..9a62b1d 100644
--- a/src/com/atlassian/uwc/ui/Page.java
+++ b/src/com/atlassian/uwc/ui/Page.java
@@ -93,8 +93,24 @@
	//true when this page targets a personal space — presumably set from the
	//personal-space properties; TODO confirm against the engine's property handling
	private boolean isPersonalSpace = false;
	//owner of the target personal space; only meaningful when isPersonalSpace is true
	private String personalSpaceUsername = null;
	/**
	 * If the page history framework is using the load-as-ancestors properties, then we load the ancestor versions of
	 * the page into this object. Useful for interacting more easily with existing hierarchies.
	 * Lazily initialized: getAncestors() creates an empty vector on first access.
	 */
	private Vector<VersionPage> ancestors;
	/**
	 * confluence entity id. useful for updating blogs.
	 * For blogs, the current page and every ancestor version share this id so
	 * all updates hit the same blog entity.
	 */
	private String id;

	/**
	 * set by the engine, used by the compareTo method.
	 * When true, same-named pages are not ordered by version number here;
	 * VersionPage orders them by timestamp instead.
	 */
	private boolean sortWithTimestamp = false;
+
+
+ /**
* Basic constructor. Creates a page with an empty path.
* @param file The file to be converted.
*/
@@ -145,8 +161,9 @@
//order by name - if name is the same, in order by version
int compareValue = (nameA.compareTo(nameB));
- if (compareValue == 0)
- compareValue = versionA - versionB;
+ if (compareValue == 0) {
+ if (!sortWithTimestamp) compareValue = versionA - versionB;
+ }
return compareValue;
}
@@ -280,6 +297,11 @@
if (latest == null) return 1;
return latest;
}
+
+
	/**
	 * Convenience overload of getLatestVersion(String) for this page's own name.
	 * @return the latest version recorded for getName(), or 1 if none recorded
	 */
	public int getLatestVersion() {
		return getLatestVersion(getName());
	}
public Set<String> getLabels() {
return labels;
@@ -433,4 +455,45 @@
this.personalSpaceUsername = username;
}
	/**
	 * Appends an ancestor (historical) version of this page.
	 * The ancestor list is lazily created via getAncestors().
	 * @param ancestor an earlier version of this page
	 */
	public void addAncestor(VersionPage ancestor) {
		getAncestors().add(ancestor);
	}
+
+ public Vector<VersionPage> getAncestors() {
+ if (this.ancestors == null)
+ this.ancestors = new Vector<VersionPage>();
+ return this.ancestors;
+ }
+
	/**
	 * Base pages do not track a parent; only VersionPage overrides this.
	 * @throws IllegalStateException always — use VersionPage to set a parent
	 */
	public void setParent(Page page) {
		throw new IllegalStateException("Use VersionPage if you wish to set the parent.");
	}
	/** @return null always; VersionPage overrides this to return its parent page */
	public Page getParent() {
		return null;
	}
+ public boolean sameTimestampAndContent(Page page) {
+ boolean content = (this.getConvertedText() != null
+ && this.getConvertedText().equals(page.getConvertedText()));
+ boolean timeisnull = (this.getTimestamp() == null && page.getTimestamp() == null);
+ boolean time = (this.getTimestamp() != null
+ && page.getTimestamp() != null
+ && this.getTimestamp().getTime() == page.getTimestamp().getTime());
+ return content && (timeisnull || time);
+ }
+
	/** @return the confluence entity id (used for updating blog entries), or null if not set */
	public String getId() {
		return id;
	}

	/** @param id the confluence entity id this page's updates should target */
	public void setId(String id) {
		this.id = id;
	}
+
	/** @return true when same-named versions are ordered by timestamp (see compareTo) rather than version number */
	public boolean isSortWithTimestamp() {
		return sortWithTimestamp;
	}

	/** @param sortWithTimestamp set by the engine to switch version ordering to timestamps */
	public void setSortWithTimestamp(boolean sortWithTimestamp) {
		this.sortWithTimestamp = sortWithTimestamp;
	}
}
diff --git a/src/com/atlassian/uwc/ui/VersionPage.java b/src/com/atlassian/uwc/ui/VersionPage.java
new file mode 100644
index 0000000..23be9d5
--- /dev/null
+++ b/src/com/atlassian/uwc/ui/VersionPage.java
@@ -0,0 +1,49 @@
+package com.atlassian.uwc.ui;
+
+import java.io.File;
+import java.util.Vector;
+
+import org.apache.log4j.Logger;
+
+public class VersionPage extends Page {
+
+ private Page parent;
+ Logger log = Logger.getLogger(this.getClass());
+ public VersionPage(File file, String path) {
+ super(file, path);
+ }
+
+ public VersionPage(File file) {
+ super(file);
+ }
+
+ public void addAncestor(VersionPage ancestor) {
+ throw new IllegalArgumentException("VersionPage object does not accept ancestors");
+ }
+
+ public Vector<VersionPage> getAncestors() {
+ return null;
+ }
+
+ public void setParent(Page page) {
+ this.parent = page;
+ }
+
+ public Page getParent() {
+ return this.parent;
+ }
+
+ @Override
+ public int compareTo(Object o) {
+ if (o instanceof Page) {
+ Page p = (Page) o;
+ if (this.getTimestamp() != null) {
+ return this.getTimestamp().compareTo(p.getTimestamp());
+ }
+ return super.compareTo(p);
+ }
+ else
+ return super.compareTo(o);
+ }
+
+}