[packages/mono-tools] - added am patch (fixes for automake 1.12+) - added mono3 patch from git - still doesn't build with mono 3.2

qboosh qboosh at pld-linux.org
Wed Oct 2 19:15:19 CEST 2013


commit c436faf8c58232bc42eccbf8dbaebc06739eb251
Author: Jakub Bogusz <qboosh at pld-linux.org>
Date:   Wed Oct 2 19:16:06 2013 +0200

    - added am patch (fixes for automake 1.12+)
    - added mono3 patch from git
    - still doesn't build with mono 3.2
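For context, a minimal sketch (not part of the commit) of the workaround that the am patch applies to each Mono.Profiler Makefile.am: automake 1.12 and later reject DATA files installed through the predefined pkglib_DATA primary, so the assemblies are attached to a user-defined directory variable that still expands to $(pkglibdir).

# Sketch of the pattern used in mono-tools-am.patch; ASSEMBLY and
# ASSEMBLY_MDB are the variables already defined in those Makefile.am files.

# Rejected by automake 1.12+ (DATA is not a legitimate primary for pkglibdir):
#pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)

# Accepted: the same files, installed through a custom *dir variable:
assdir = $(pkglibdir)
ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)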

 mono-tools-am.patch    |  72 +++++
 mono-tools-mono3.patch | 741 +++++++++++++++++++++++++++++++++++++++++++++++++
 mono-tools.spec        |   8 +-
 3 files changed, 819 insertions(+), 2 deletions(-)
---
diff --git a/mono-tools.spec b/mono-tools.spec
index 261d212..3c5230c 100644
--- a/mono-tools.spec
+++ b/mono-tools.spec
@@ -1,3 +1,4 @@
+# NOTE: 2.11 tarball is broken
 #
 # Conditional build:
 %bcond_with	gecko		# don't build gecko html renderer
@@ -10,11 +11,12 @@ Version:	2.10
 Release:	3
 License:	GPL v2+
 Group:		Development/Tools
-# latest downloads summary at http://ftp.novell.com/pub/mono/sources-stable/
-Source0:	http://ftp.novell.com/pub/mono/sources/mono-tools/%{name}-%{version}.tar.bz2
+Source0:	http://download.mono-project.com/sources/mono-tools/%{name}-%{version}.tar.bz2
 # Source0-md5:	da178df2c119c696c08c09dc9eb01994
 Patch0:		%{name}-pwd.patch
 Patch1:		%{name}-configure.patch
+Patch2:		%{name}-am.patch
+Patch3:		%{name}-mono3.patch
 URL:		http://www.mono-project.com/
 BuildRequires:	autoconf
 BuildRequires:	automake
@@ -152,6 +154,8 @@ zawartości.
 %setup -q
 %patch0 -p1
 %patch1 -p1
+%patch2 -p1
+%patch3 -p1
 
 # as expected by ilcontrast script
 %{__sed} -i -e 's,\$(libdir)/ilcontrast,$(prefix)/lib/ilcontrast,' ilcontrast/Makefile.am
diff --git a/mono-tools-am.patch b/mono-tools-am.patch
new file mode 100644
index 0000000..9cce9d2
--- /dev/null
+++ b/mono-tools-am.patch
@@ -0,0 +1,72 @@
+--- mono-tools-2.10/Mono.Profiler/Mono.Profiler.Widgets/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/Mono.Profiler.Widgets/Makefile.am	2013-09-29 21:57:59.331555089 +0200
+@@ -9,7 +9,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+ CLEANFILES = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+--- mono-tools-2.10/Mono.Profiler/heap-snapshot-explorer/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/heap-snapshot-explorer/Makefile.am	2013-09-29 21:59:02.694493316 +0200
+@@ -9,7 +9,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+ CLEANFILES = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+--- mono-tools-2.10/Mono.Profiler/heap-snapshot-viewer/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/heap-snapshot-viewer/Makefile.am	2013-09-29 21:59:27.985666109 +0200
+@@ -8,7 +8,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ bin_SCRIPTS = mprof-heap-viewer
+ man_MANS=man/man1/mprof-heap-viewer.1
+ 
+--- mono-tools-2.10/Mono.Profiler/mprof-gui/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/mprof-gui/Makefile.am	2013-09-29 22:00:02.557269251 +0200
+@@ -8,7 +8,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ bin_SCRIPTS = emveepee
+ 
+ CLEANFILES = $(ASSEMBLY) $(ASSEMBLY_MDB)
+--- mono-tools-2.10/Mono.Profiler/profiler-decoder-library/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/profiler-decoder-library/Makefile.am	2013-09-30 18:44:47.911443506 +0200
+@@ -9,7 +9,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+ CLEANFILES = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ 
+--- mono-tools-2.10/Mono.Profiler/profiler-file-decoder/Makefile.am.orig	2011-02-12 17:32:47.000000000 +0100
++++ mono-tools-2.10/Mono.Profiler/profiler-file-decoder/Makefile.am	2013-09-30 18:45:34.606945410 +0200
+@@ -9,7 +9,8 @@
+ ASSEMBLY_MDB = 
+ endif
+ 
+-pkglib_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
++assdir = $(pkglibdir)
++ass_DATA = $(ASSEMBLY) $(ASSEMBLY_MDB)
+ bin_SCRIPTS = mprof-decoder
+ man_MANS = man/man1/mprof-decoder.1 
+ 
diff --git a/mono-tools-mono3.patch b/mono-tools-mono3.patch
new file mode 100644
index 0000000..75ebb44
--- /dev/null
+++ b/mono-tools-mono3.patch
@@ -0,0 +1,741 @@
+diff --git a/docbrowser/Makefile.am b/docbrowser/Makefile.am
+index a1a2626..065c430 100644
+--- a/docbrowser/Makefile.am
++++ b/docbrowser/Makefile.am
+@@ -40,6 +40,7 @@ browser_sources   = \
+ 	$(srcdir)/list.cs 		\
+ 	$(srcdir)/elabel.cs 		\
+ 	$(srcdir)/history.cs 		\
++	$(srcdir)/editing.cs            \
+ 	$(srcdir)/Contributions.cs	\
+ 	$(srcdir)/XmlNodeWriter.cs	\
+ 	$(srcdir)/IHtmlRender.cs	\
+@@ -67,7 +68,7 @@ admin_sources = \
+ 	$(srcdir)/admin.cs		\
+ 	$(srcdir)/Contributions.cs
+ 
+-browser_assemblies = $(GTK_SHARP_LIBS) $(MONODOC_LIBS) $(GNOME_SHARP_LIBS) -r:System.Web.Services
++browser_assemblies = $(GTK_SHARP_LIBS) $(MONODOC_LIBS) $(GNOME_SHARP_LIBS) -r:System.Web.Services -r:System.Web
+ # we insert gtkhtml libs if we have them for printing 
+ geckorender_assemblies = $(GTK_SHARP_LIBS) $(GTKHTML_SHARP_LIBS) $(GECKO_SHARP_LIBS) $(GNOME_SHARP_LIBS) $(MONODOC_LIBS) -r:browser.exe
+ gtkhtmlrender_assemblies = $(GTK_SHARP_LIBS) $(GTKHTML_SHARP_LIBS) $(GNOME_SHARP_LIBS) $(MONODOC_LIBS) -r:browser.exe
+diff --git a/docbrowser/browser.cs b/docbrowser/browser.cs
+index 2984bc7..6f70620 100644
+--- a/docbrowser/browser.cs
++++ b/docbrowser/browser.cs
+@@ -60,7 +60,7 @@ class Driver {
+ 				v => sources.Add (v) },
+ 			{ "edit=",
+ 				"Edit mdoc(5) XML documentation found within {PATH}.",
+-				v => RootTree.UncompiledHelpSources.Add (v) },
++				v => RootTree.AddUncompiledSource (v) },
+ 			{ "engine=",
+ 				"Specify which HTML rendering {ENGINE} to use:\n" + 
+ 					"  " + string.Join ("\n  ", engines) + "\n" +
+@@ -132,7 +132,7 @@ class Driver {
+ 			return r;
+ 		}
+ 
+-		if (mergeConfigFile != null) {
++		/*if (mergeConfigFile != null) {
+ 			ArrayList targetDirs = new ArrayList ();
+ 			
+ 			for (int i = 0; i < topics.Count; i++)
+@@ -145,7 +145,7 @@ class Driver {
+ 
+ 			e.Merge ();
+ 			return 0;
+-		}
++		}*/
+ 		
+ 		if (r != 0 || !show_gui)
+ 			return r;
+@@ -257,6 +257,7 @@ public class Browser {
+ 	TreeView search_tree;
+ 	TreeStore search_store;
+ 	SearchableIndex search_index;
++	ArrayList searchResults = new ArrayList (20);
+ 	string highlight_text;
+ 	[Glade.Widget] VBox search_vbox;
+ 	ProgressPanel ppanel;
+@@ -578,6 +579,7 @@ public class Browser {
+ 		Result r = search_index.Search (term);
+ 		if (r == null)
+ 			return; //There was a problem with the index
++		searchResults.Add (r);
+ 		//insert the results in the tree
+ 		TreeIter iter;
+ 					
+@@ -622,7 +624,7 @@ public class Browser {
+ 			return;
+ 		int i_0 = p.Indices [0];
+ 		int i_1 = p.Indices [1];
+-		Result res = (Result) search_index.Results [i_0];
++		Result res = (Result) searchResults [i_0];
+ 		TreeIter parent;
+ 		model.IterParent (out parent, iter);
+ 		string term = (string) search_store.GetValue (parent, 0);
+@@ -648,21 +650,21 @@ public class Browser {
+ 	void TextLarger (object obj, EventArgs args)
+ 	{
+ 		SettingsHandler.Settings.preferred_font_size += 10;
+-		HelpSource.CssCode = null;
++		//HelpSource.CssCode = null;
+ 		Reload ();
+ 		SettingsHandler.Save ();
+ 	}
+ 	void TextSmaller (object obj, EventArgs args)
+ 	{
+ 		SettingsHandler.Settings.preferred_font_size -= 10;
+-		HelpSource.CssCode = null;
++		//HelpSource.CssCode = null;
+ 		Reload ();
+ 		SettingsHandler.Save ();
+ 	}
+ 	void TextNormal (object obj, EventArgs args)
+ 	{
+ 		SettingsHandler.Settings.preferred_font_size = 100;
+-		HelpSource.CssCode = null;
++		//HelpSource.CssCode = null;
+ 		Reload ();
+ 		SettingsHandler.Save ();
+ 	}
+@@ -794,8 +796,8 @@ public class Browser {
+ 			//
+ 			string tabTitle;
+ 			tabTitle = matched_node.Caption; //Normal title
+-			string[] parts = matched_node.URL.Split('/', '#');
+-			if(matched_node.URL != null && matched_node.URL.StartsWith("ecma:")) {
++			string[] parts = matched_node.PublicUrl.Split('/', '#');
++			if(matched_node.PublicUrl != null && matched_node.PublicUrl.StartsWith("ecma:")) {
+ 				if(parts.Length == 3 && parts[2] != String.Empty) { //List of Members, properties, events, ...
+ 					tabTitle = parts[1] + ": " + matched_node.Caption;
+ 				} else if(parts.Length >= 4) { //Showing a concrete Member, property, ...					
+@@ -1701,7 +1703,7 @@ ExtLoop:
+ 
+ 		void OnOkClicked (object sender, EventArgs a)
+ 		{
+-			CommentService service = new CommentService();
++			//CommentService service = new CommentService();
+ 			// todo
+ 			newcomment.Hide ();
+ 		}
+@@ -2020,7 +2022,7 @@ public class TreeBrowser {
+ 		if (tree_view.Selection.GetSelected (out model, out iter)){
+ 			Node n = (Node) iter_to_node [iter];
+ 			
+-			string url = n.URL;
++			string url = n.PublicUrl;
+ 			Node match;
+ 			string s;
+ 
+@@ -2048,7 +2050,7 @@ public class TreeBrowser {
+ 				return;
+ 			}
+ 
+-			((Browser)browser).Render ("<h1>Unhandled URL</h1>" + "<p>Functionality to view the resource <i>" + n.URL + "</i> is not available on your system or has not yet been implemented.</p>", null, url);
++			((Browser)browser).Render ("<h1>Unhandled URL</h1>" + "<p>Functionality to view the resource <i>" + n.PublicUrl + "</i> is not available on your system or has not yet been implemented.</p>", null, url);
+ 		}
+ 	}
+ }
+@@ -2712,7 +2714,7 @@ public class Tab : Notebook {
+ 		string [] uSplit = EditingUtils.ParseEditUrl (edit_url);
+ 		
+ 		if (uSplit[0].StartsWith ("monodoc:"))
+-			EditingUtils.SaveChange (edit_url, browser.help_tree, edit_node, EcmaHelpSource.GetNiceUrl (browser.CurrentTab.CurrentNode));
++			EditingUtils.SaveChange (edit_url, browser.help_tree, edit_node, GetNiceUrl (browser.CurrentTab.CurrentNode));
+ 		else if (uSplit[0].StartsWith ("file:"))
+ 			EditingUtils.SaveChange (edit_url, browser.help_tree, edit_node, String.Empty);
+ 		else
+@@ -2721,6 +2723,49 @@ public class Tab : Notebook {
+ 		history.ActivateCurrent ();
+ 	}
+ 
++	public static string GetNiceUrl (Node node) {
++		if (node.Element.StartsWith("N:"))
++			return node.Element;
++		string name, full;
++		int bk_pos = node.Caption.IndexOf (' ');
++		// node from an overview
++		if (bk_pos != -1) {
++			name = node.Caption.Substring (0, bk_pos);
++			full = node.Parent.Caption + "." + name.Replace ('.', '+');
++			return "T:" + full;
++		}
++		// node that lists constructors, methods, fields, ...
++		if ((node.Caption == "Constructors") || (node.Caption == "Fields") || (node.Caption == "Events") 
++			|| (node.Caption == "Members") || (node.Caption == "Properties") || (node.Caption == "Methods")
++			|| (node.Caption == "Operators")) {
++			bk_pos = node.Parent.Caption.IndexOf (' ');
++			name = node.Parent.Caption.Substring (0, bk_pos);
++			full = node.Parent.Parent.Caption + "." + name.Replace ('.', '+');
++			return "T:" + full + "/" + node.Element; 
++		}
++		int pr_pos = node.Caption.IndexOf ('(');
++		// node from a constructor
++		if (node.Parent.Element == "C") {
++			name = node.Parent.Parent.Parent.Caption;
++			int idx = node.PublicUrl.IndexOf ('/');
++			return node.PublicUrl[idx+1] + ":" + name + "." + node.Caption.Replace ('.', '+');
++		// node from a method with one signature, field, property, operator
++		} else if (pr_pos == -1) {
++			bk_pos = node.Parent.Parent.Caption.IndexOf (' ');
++			name = node.Parent.Parent.Caption.Substring (0, bk_pos);
++			full = node.Parent.Parent.Parent.Caption + "." + name.Replace ('.', '+');
++			int idx = node.PublicUrl.IndexOf ('/');
++			return node.PublicUrl[idx+1] + ":" + full + "." + node.Caption;
++		// node from a method with several signatures
++		} else {
++			bk_pos = node.Parent.Parent.Parent.Caption.IndexOf (' ');
++			name = node.Parent.Parent.Parent.Caption.Substring (0, bk_pos);
++			full = node.Parent.Parent.Parent.Parent.Caption + "." + name.Replace ('.', '+');
++			int idx = node.PublicUrl.IndexOf ('/');
++			return node.PublicUrl[idx+1] + ":" + full + "." + node.Caption;
++		}
++	}
++
+ 	void OnCancelEdits (object sender, EventArgs a)
+ 	{
+ 		SetMode (Mode.Viewer);
+@@ -2747,6 +2792,7 @@ public class Tab : Notebook {
+ 			
+ 			StringWriter sw = new StringWriter ();
+ 			XmlWriter w = new XmlTextWriter (sw);
++			var converter = new Monodoc.Generators.Html.Ecma2Html ();
+ 			
+ 			try {
+ 				edit_node.InnerXml = text_editor.Buffer.Text;
+@@ -2760,7 +2806,7 @@ public class Tab : Notebook {
+ 			}
+ 			browser.statusbar.Pop (browser.context_id);
+ 			browser.statusbar.Push (browser.context_id, "XML OK");
+-			string s = HelpSource.BuildHtml (EcmaHelpSource.css_ecma_code, sw.ToString ());
++			string s = converter.Export (sw.ToString (), new Dictionary<string, string> ());
+ 			html_preview.Render(s);
+ 
+ 			return false;
+diff --git a/docbrowser/editing.cs b/docbrowser/editing.cs
+new file mode 100644
+index 0000000..d7c1e32
+--- /dev/null
++++ b/docbrowser/editing.cs
+@@ -0,0 +1,519 @@
++//
++// editing.cs
++//
++// Author:
++//   Ben Maurer (bmaurer at users.sourceforge.net)
++//
++// (C) 2003 Ben Maurer
++//
++
++using System;
++using System.Collections;
++using System.Collections.Specialized;
++using System.IO;
++using System.Text;
++using System.Xml;
++using System.Xml.Serialization;
++using System.Xml.XPath;
++using System.Web;
++
++namespace Monodoc {
++	public class EditingUtils {
++		
++		public static string FormatEditUri (string document_identifier, string xpath)
++		{
++			return String.Format ("edit:{0}@{1}", HttpUtility.UrlEncode (document_identifier),
++				HttpUtility.UrlEncode (xpath));
++		}
++		
++		public static string GetXPath (XPathNavigator n)
++		{
++			switch (n.NodeType) {
++				case XPathNodeType.Root: return "/";
++				case XPathNodeType.Attribute: {
++					string ret = "@" + n.Name;
++					n.MoveToParent ();
++					string s = GetXPath (n);
++					return s + (s == "/" ? "" : "/") + ret;
++				}
++
++				case XPathNodeType.Element: {
++					string ret = n.Name;
++					int i = 1;
++					while (n.MoveToPrevious ()) {
++						if (n.NodeType == XPathNodeType.Element && n.Name == ret)
++							i++;
++					}
++					ret += "[" + i + "]";
++					if (n.MoveToParent ()) {
++						string s = GetXPath (n);
++						return s + (s == "/" ? "" : "/") + ret;
++					}
++				}
++				break;
++			}
++			throw new Exception ("node type not supported for editing");
++			
++		}
++		
++		public static XmlNode GetNodeFromUrl (string url, RootTree tree)
++		{
++			Console.WriteLine ("Url is: {0}", url);
++			string [] uSplit = ParseEditUrl (url);
++			Console.WriteLine ("Results are: {0}\n{1}\n{2}", uSplit [0], uSplit [1], uSplit [2]);
++			
++			string xp = uSplit [2];
++			string id =  uSplit [1];
++			
++			XmlDocument d;
++			
++			if (uSplit[0].StartsWith("monodoc:///")) {
++				int prov = int.Parse (uSplit [0].Substring("monodoc:///".Length));
++				d = tree.GetHelpSourceFromId (prov).GetHelpXmlWithChanges (id);
++			} else if (uSplit[0].StartsWith("file:")) {
++				d = new XmlDocument();
++				d.PreserveWhitespace = true;
++				d.Load(uSplit[0].Substring(5));
++			} else {
++				throw new NotImplementedException("Don't know how to load " + url); 
++			}			
++			
++			return d.SelectSingleNode (xp);
++				
++		}
++		
++		public static void SaveChange (string url, RootTree tree, XmlNode node, string node_url)
++		{
++			/*string [] uSplit = ParseEditUrl (url);
++		
++			string xp = uSplit [2];
++			string id =  uSplit [1];
++						
++			if (uSplit[0].StartsWith("monodoc:///")) {
++				int prov = int.Parse (uSplit [0].Substring("monodoc:///".Length));
++				HelpSource hs = tree.GetHelpSourceFromId (prov);
++				
++				changes.AddChange (hs.Name, hs.GetRealPath (id), xp, node, node_url);
++				changes.Save ();
++			} else if (uSplit[0].StartsWith("file:")) {
++				uSplit[0] = uSplit[0].Substring(5);
++				
++				XmlDocument d = new XmlDocument();
++				d.PreserveWhitespace = true;
++				d.Load(uSplit[0]);
++				
++				XmlNode original = d.SelectSingleNode(xp);
++				original.ParentNode.ReplaceChild(d.ImportNode(node, true), original);
++				
++				d.Save(uSplit[0]);
++			} else {				
++				throw new NotImplementedException("Don't know how to save to " + url); 
++			}*/
++		}
++
++		public static void RemoveChange (string url, RootTree tree)
++		{
++			/*string [] uSplit = ParseEditUrl (url);
++		
++			string xp = uSplit [2];
++			string id = uSplit [1];
++						
++			if (uSplit[0].StartsWith("monodoc:///")) {
++				int prov = int.Parse (uSplit [0].Substring("monodoc:///".Length));
++				HelpSource hs = tree.GetHelpSourceFromId (prov);
++				
++				changes.RemoveChange (hs.Name, hs.GetRealPath (id), xp);
++				changes.Save ();
++			} else if (uSplit[0].StartsWith("file:")) {
++				//TODO: Not implemented
++			}*/
++		}
++		
++		public static void RenderEditPreview (string url, RootTree tree, XmlNode new_node, XmlWriter w)
++		{
++			string [] uSplit = ParseEditUrl (url);
++		
++			if (uSplit[0].StartsWith("monodoc:///")) {
++				int prov = int.Parse (uSplit [0].Substring("monodoc:///".Length));
++				HelpSource hs = tree.GetHelpSourceFromId (prov);
++				hs.RenderPreviewDocs (new_node, w);
++			} else {
++				foreach (HelpSource hs in tree.HelpSources) {
++					if (hs is Monodoc.Providers.EcmaUncompiledHelpSource) {
++						// It doesn't matter which EcmaHelpSource is chosen.
++						hs.RenderPreviewDocs (new_node, w);
++						break;
++					}
++				}				
++			}
++		}
++		
++		public static string [] ParseEditUrl (string url)
++		{
++			if (!url.StartsWith ("edit:"))
++				throw new Exception ("wtf");
++			
++			string [] parts = url.Split ('@');
++			if (parts.Length != 2)
++				throw new Exception (String.Format ("invalid editing url {0}", parts.Length));
++			
++			string xp = HttpUtility.UrlDecode (parts [1]);
++			parts = HttpUtility.UrlDecode (parts [0]).Substring ("edit:".Length).Split ('@');
++			if (parts.Length == 1) {
++				string p = parts[0];
++				parts = new string[2];
++				parts[0] = p;
++				parts[1] = "";
++			}
++			
++			return new string [] {parts [0], parts [1], xp};
++		}
++		
++		public static void AccountForChanges (XmlDocument d, string doc_set, string real_file)
++		{
++			try {
++				FileChangeset fcs = changes.GetChangeset (doc_set, real_file);
++				if (fcs == null)
++					return;
++				
++				foreach (Change c in fcs.Changes) {
++					// Filter out old changes
++					if (c.FromVersion != RootTree.MonodocVersion)
++						continue;
++					
++					XmlNode old = d.SelectSingleNode (c.XPath);
++					if (old != null)
++						old.ParentNode.ReplaceChild (d.ImportNode (c.NewNode, true), old);
++				}
++			} catch {
++				return;
++			}
++		}
++	
++		public static GlobalChangeset changes = GlobalChangeset.Load ();
++
++		static public GlobalChangeset GetChangesFrom (int starting_serial_id)
++		{
++			return changes.GetFrom (starting_serial_id);
++		}
++	}
++
++#region Data Model
++	public class GlobalChangeset {
++
++		public static XmlSerializer serializer = new XmlSerializer (typeof (GlobalChangeset));
++		static string changeset_file = Path.Combine (SettingsHandler.Path, "changeset.xml");
++		static string changeset_backup_file = Path.Combine (SettingsHandler.Path, "changeset.xml~");
++	
++		public static GlobalChangeset Load ()
++		{
++			try {
++				if (File.Exists (changeset_file))
++					return LoadFromFile (changeset_file);
++			} catch {}
++			
++			return new GlobalChangeset ();
++		}
++		
++		public static GlobalChangeset LoadFromFile (string fileName)
++		{
++			using (Stream s = File.OpenRead (fileName)) {
++				return (GlobalChangeset) serializer.Deserialize (s);
++			}
++		}			
++		
++		public void Save ()
++		{
++			SettingsHandler.EnsureSettingsDirectory ();
++
++			try {    
++				if (File.Exists(changeset_file))  // create backup copy
++					File.Copy (changeset_file, changeset_backup_file, true);
++           
++				using (FileStream fs = File.Create (changeset_file)){
++					serializer.Serialize (fs, this);
++				}
++			} catch (Exception e) {
++				Console.WriteLine ("Error while saving changes. " + e);
++				if (File.Exists(changeset_backup_file))  // if saving fails then use backup if we have one				
++					File.Copy (changeset_backup_file, changeset_file, true);
++				else
++					File.Delete (changeset_file);   // if no backup, delete invalid changeset 
++			}
++		}
++		
++		static void VerifyDirectoryExists (DirectoryInfo d) {
++			if (d.Exists)
++				return;
++
++			VerifyDirectoryExists (d.Parent);
++			d.Create ();
++		}
++
++		[XmlElement ("DocSetChangeset", typeof (DocSetChangeset))]
++		public ArrayList DocSetChangesets = new ArrayList ();
++
++		public FileChangeset GetChangeset (string doc_set, string real_file)
++		{
++			foreach (DocSetChangeset dscs in DocSetChangesets) {
++				if (dscs.DocSet != doc_set) 
++					continue;
++			
++				foreach (FileChangeset fcs in dscs.FileChangesets) {
++					if (fcs.RealFile == real_file)
++						return fcs;
++				}
++			}
++			
++			return null;
++		}
++
++		public int Count {
++			get {
++				int count = 0;
++				
++				foreach (DocSetChangeset dscs in DocSetChangesets){
++					foreach (FileChangeset fcs in dscs.FileChangesets){
++						count += fcs.Changes.Count;
++					}
++				}
++
++				return count;
++			}
++		}
++
++		Change NewChange (string xpath, XmlNode new_node, string node_url)
++		{
++			Change new_change = new Change ();
++			new_change.XPath = xpath;
++			new_change.NewNode = new_node;
++			new_change.NodeUrl = node_url;
++
++			Console.WriteLine ("New serial:" + SettingsHandler.Settings.SerialNumber);
++			new_change.Serial = SettingsHandler.Settings.SerialNumber;
++
++			return new_change;
++		}
++		
++		public void AddChange (string doc_set, string real_file, string xpath, XmlNode new_node, string node_url)
++		{
++			FileChangeset new_file_change_set;
++			Change new_change = NewChange (xpath, new_node, node_url);
++			
++			if (real_file == null)
++				throw new Exception ("Could not find real_file. Please talk to Miguel or Ben about this");
++			
++			foreach (DocSetChangeset dscs in DocSetChangesets) {
++				if (dscs.DocSet != doc_set) 
++					continue;
++
++				foreach (FileChangeset fcs in dscs.FileChangesets) {
++					if (fcs.RealFile != real_file)
++						continue;
++					
++					foreach (Change c in fcs.Changes) {
++						if (c.XPath == xpath) {
++							c.NewNode = new_node;
++							c.Serial = SettingsHandler.Settings.SerialNumber;
++							return;
++						}
++					}
++
++					fcs.Changes.Add (new_change);
++					return;
++					
++				}
++				
++				new_file_change_set = new FileChangeset ();
++				new_file_change_set.RealFile = real_file;
++				new_file_change_set.Changes.Add (new_change);
++				dscs.FileChangesets.Add (new_file_change_set);
++				return;
++					
++			}
++			
++			DocSetChangeset new_dcs = new DocSetChangeset ();
++			new_dcs.DocSet = doc_set;
++			
++			new_file_change_set = new FileChangeset ();
++			new_file_change_set.RealFile = real_file;
++			
++			new_file_change_set.Changes.Add (new_change);
++			new_dcs.FileChangesets.Add (new_file_change_set);
++			DocSetChangesets.Add (new_dcs);
++		}
++
++		public void RemoveChange (string doc_set, string real_file, string xpath)
++		{
++			if (real_file == null)
++				throw new Exception ("Could not find real_file. Please talk to Miguel or Ben about this");
++			
++			for (int i = 0; i < DocSetChangesets.Count; i++) {
++				DocSetChangeset dscs = DocSetChangesets [i] as DocSetChangeset;
++				if (dscs.DocSet != doc_set) 
++					continue;
++
++				for (int j = 0; j < dscs.FileChangesets.Count; j++) {
++					FileChangeset fcs = dscs.FileChangesets [j] as FileChangeset;
++					if (fcs.RealFile != real_file)
++						continue;
++
++					for (int k = 0; k < fcs.Changes.Count; k++) {
++						Change c = fcs.Changes [k] as Change;
++						if (c.XPath == xpath) {
++							fcs.Changes.Remove (c);
++							break;
++						}
++					}
++					if (fcs.Changes.Count == 0)
++						dscs.FileChangesets.Remove (fcs);
++				}
++
++				if (dscs.FileChangesets.Count == 0)
++					DocSetChangesets.Remove (dscs);
++			}
++		}
++
++		public GlobalChangeset GetFrom (int starting_serial_id)
++		{
++			GlobalChangeset s = null;
++			
++			foreach (DocSetChangeset dscs in DocSetChangesets){
++				object o = dscs.GetFrom (starting_serial_id);
++				if (o == null)
++					continue;
++				if (s == null)
++					s = new GlobalChangeset ();
++				s.DocSetChangesets.Add (o);
++			}
++			return s;
++		}
++	}
++	
++	public class DocSetChangeset {
++		[XmlAttribute] public string DocSet;
++		
++		[XmlElement ("FileChangeset", typeof (FileChangeset))]
++		public ArrayList FileChangesets = new ArrayList ();
++
++		public DocSetChangeset GetFrom (int starting_serial_id)
++		{
++			DocSetChangeset dsc = null;
++			
++			foreach (FileChangeset fcs in FileChangesets){
++				object o = fcs.GetFrom (starting_serial_id);
++				if (o == null)
++					continue;
++				if (dsc == null){
++					dsc = new DocSetChangeset ();
++					dsc.DocSet = DocSet;
++				}
++				dsc.FileChangesets.Add (o);
++			}
++			return dsc;
++		}
++	}
++	
++	public class FileChangeset {
++		[XmlAttribute] public string RealFile;
++		
++		[XmlElement ("Change", typeof (Change))]
++		public ArrayList Changes = new ArrayList ();
++
++		public FileChangeset GetFrom (int starting_serial_id)
++		{
++			FileChangeset fcs = null;
++
++			foreach (Change c in Changes){
++				if (c.Serial < starting_serial_id)
++					continue;
++				if (fcs == null){
++					fcs = new FileChangeset ();
++					fcs.RealFile = RealFile;
++				}
++				fcs.Changes.Add (c);
++			}
++			return fcs;
++		}
++	}
++	
++	public class Change {
++		[XmlAttribute] public string XPath;
++		[XmlAttribute] public int FromVersion = RootTree.MonodocVersion;
++		[XmlAttribute] public string NodeUrl;
++		
++		public XmlNode NewNode;
++
++		public int Serial;
++
++		bool applied = false;
++		
++		//
++		// These are not a property, because we dont want them serialized;
++		// Only used by the Admin Client.
++		//
++		public bool Applied ()
++		{
++			return applied;
++		}
++
++		public void SetApplied (bool value)
++		{
++			applied = value;
++		}
++	}
++#endregion
++	
++	public class EditMerger {
++		GlobalChangeset changeset;
++		ArrayList targetDirs;
++		
++		public EditMerger (GlobalChangeset changeset, ArrayList targetDirs)
++		{
++			this.changeset = changeset;
++			this.targetDirs = targetDirs;
++		}
++		
++		public void Merge ()
++		{
++			foreach (DocSetChangeset dsc in changeset.DocSetChangesets) {
++				bool merged = false;
++				foreach (string path in targetDirs) {
++					if (File.Exists (Path.Combine (path, dsc.DocSet + ".source"))) {
++						Merge (dsc, path);
++						merged = true;
++						break;
++					}
++				}
++				if (!merged) Console.WriteLine ("Could not merge docset {0}", dsc.DocSet);
++			}
++		}
++		
++		void Merge (DocSetChangeset dsc, string path)
++		{
++			Console.WriteLine ("Merging changes in {0} ({1})", dsc.DocSet, path);
++			
++			foreach (FileChangeset fcs in dsc.FileChangesets) {
++				if (File.Exists (Path.Combine (path, fcs.RealFile)))
++					Merge (fcs, path);
++				else
++					Console.WriteLine ("\tCould not find file {0}", Path.Combine (path, fcs.RealFile));
++			}
++		}
++		
++		void Merge (FileChangeset fcs, string path)
++		{
++			XmlDocument d = new XmlDocument ();
++			d.Load (Path.Combine (path, fcs.RealFile));
++			
++			foreach (Change c in fcs.Changes) {
++				XmlNode old = d.SelectSingleNode (c.XPath);
++				if (old != null)
++					old.ParentNode.ReplaceChild (d.ImportNode (c.NewNode, true), old);
++			}
++			
++			d.Save (Path.Combine (path, fcs.RealFile));
++		}
++	}
++}
++
================================================================
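For reference, a minimal C# sketch (not part of the commit) of the Monodoc API changes that the mono3 patch switches the browser to. The API names below are taken from the patch itself (RootTree.AddUncompiledSource, Node.PublicUrl, Monodoc.Generators.Html.Ecma2Html); the file name, compile command and paths are placeholders, and, as the commit notes, the package still fails to build against mono 3.2, so treat this only as an illustration of the intended API.

// api-shift.cs: illustrative only; compile against the Mono 3.x monodoc
// assembly, e.g. "mcs api-shift.cs -pkg:monodoc" (assumes monodoc.pc is installed).
using System;
using Monodoc;

static class ApiShift
{
	// Illustrative helper, not called from Main:
	// Mono 2.x exposed Node.URL; the Mono 3.x API exposes Node.PublicUrl instead.
	static string Describe (Node n)
	{
		return n.Caption + " -> " + n.PublicUrl;
	}

	static void Main (string [] args)
	{
		// Mono 2.x: RootTree.UncompiledHelpSources.Add (path);
		// Mono 3.x: uncompiled documentation sources are registered via a method.
		if (args.Length > 0)
			RootTree.AddUncompiledSource (args [0]);

		// Mono 2.x: HelpSource.BuildHtml (EcmaHelpSource.css_ecma_code, ecmaXml);
		// Mono 3.x: ECMA XML is rendered through a generator object, e.g.
		//   converter.Export (ecmaXml, new Dictionary<string, string> ());
		var converter = new Monodoc.Generators.Html.Ecma2Html ();
		Console.WriteLine (converter.GetType ().FullName);
	}
}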

---- gitweb:

http://git.pld-linux.org/gitweb.cgi/packages/mono-tools.git/commitdiff/c436faf8c58232bc42eccbf8dbaebc06739eb251


