The .Text blog engine my site runs on is getting pretty crusty. The project was absorbed into Community Server years ago and is no longer maintained.
I want to upgrade to a modern engine but I ran into a problem: there's no good way to import and export .Text blog content to a new engine.
I couldn't find an Atom or BlogML converter for .Text so I wrote my own.
Here's the binary. Pass in the connection string to your dottext database. If you have multiple blogs on one install it will export each in an individual file.
I'm also posting the code here. The few references to .Text converters I found online had broken links. If that happens to this post by the time I eventually convert my site, at least it should be possible to compile it from what's left. The code is quick and dirty, but it serves its limited purpose.
Once the data is in BlogML format, it should at least be workable to import it into other blog engines, even if it needs a conversion to Atom or a proprietary format. I'm considering using something like http://atomsite.net/ but first things first: I've got to get all the data exported.
The .Text export code:
using System;
using System.Collections.Generic;
using BlogML;
using BlogML.Xml;
using System.Data.SqlClient;
using System.Data;
using System.IO;
using System.Linq;
using System.Linq.Expressions;
namespace dtBlogML
{
class Program
{
    /// <summary>Connection string to the dottext database, taken from the first command-line argument.</summary>
    private static string ConnectionString;

    /// <summary>
    /// Entry point: exports every blog found in blog_config to a separate BlogML
    /// xml file in the current directory.
    /// </summary>
    static void Main(string[] args)
    {
        if (args.Length == 0)
        {
            //no connection string supplied - print usage and bail out
            Console.WriteLine(String.Format("{0}Exports dottext blogs to BlogML{0}Each blog is output in the current directory to a separate xml file." +
                "{0}Connection string to the dottext database must be passed in." +
                "{0}usage:{0}dtBlogML <connection string>" +
                "{0}ex: dtBlogML \"Data Source=0.0.0.0;Initial Catalog=dottext;Persist Security Info=True;User ID=user;Password=password\"", Environment.NewLine));
            return;
        }
        ConnectionString = args[0];
        //gather up the base blogs
        DataSet dsBlogs = ExecuteDataset(ConnectionString, "select * from blog_config");
        foreach (DataRow row in dsBlogs.Tables[0].Rows)
        {
            //each entry represents a different blog config complete with posts, comments, articles, etc..
            BlogMLBlog blog = new BlogMLBlog();
            blog.RootUrl = row["host"].ToString();
            blog.Title = row["title"].ToString();
            Console.WriteLine("Exporting " + blog.Title);
            blog.SubTitle = row["subtitle"].ToString();
            //.text is a single-author engine, so the blog_config row doubles as the author record
            blog.Authors.Add(new BlogMLAuthor() { Email = row["email"].ToString(), ID = row["username"].ToString(), Title = row["author"].ToString() });
            //grab the categories in one go and add them as needed
            //only interested in category types 1 and 2 in the dottext database
            DataSet catDS = ExecuteDataset(ConnectionString,
                "select * from blog_LinkCategories where blogid = @blogid and CategoryType in (1,2)",
                new SqlParameter[] { new SqlParameter("@blogid", (int)row["blogid"]) });
            foreach (DataRow catRow in catDS.Tables[0].Rows)
            {
                //init the category data and add to the top level blog entry
                //posts will be marked with the category ref where needed
                BlogMLCategory category = new BlogMLCategory()
                {
                    Title = catRow["title"].ToString(),
                    ID = catRow["categoryid"].ToString(),
                    Description = catRow["description"].ToString()
                };
                blog.Categories.Add(category);
            }
            //load entries and articles
            LoadEntries(blog, (int)row["blogid"]);
            //blog titles can contain characters that are illegal in file names, so scrub them;
            //dispose the stream so the file is flushed and closed before the next blog is processed
            string fileName = "blogml-" + SanitizeFileName(blog.Title) + ".xml";
            using (FileStream output = new FileStream(Path.Combine(Environment.CurrentDirectory, fileName), FileMode.Create))
            {
                BlogMLSerializer.Serialize(output, blog);
            }
        }
        Console.WriteLine("done....");
        Console.ReadLine();
    }

    /// <summary>
    /// Replaces any character that is invalid in a file name with an underscore.
    /// </summary>
    private static string SanitizeFileName(string name)
    {
        foreach (char c in Path.GetInvalidFileNameChars())
        {
            name = name.Replace(c, '_');
        }
        return name;
    }

    /// <summary>
    /// Loads the posts, articles, and comments for one blog and adds them to <paramref name="blog"/>.
    /// </summary>
    /// <param name="blog">Target blog; must already have its author added (comments are attributed to it).</param>
    /// <param name="blogid">The dottext blog id to export.</param>
    private static void LoadEntries(BlogMLBlog blog, int blogid)
    {
        //can't just join the categories on the posts since its one to many and we'd dupe the posts
        //quick and dirty: just load the post->category references in memory - shouldn't be too bad... i hope
        DataSet catRefDS = ExecuteDataset(ConnectionString,
            "select postid, categoryid from blog_Links where blogid = @blogid and postid > -1",
            new SqlParameter[] { new SqlParameter("@blogid", blogid) });
        //load the entries, articles, and comments in one pass
        //the posts can't be pulled back up by id later, so track them in a lookup
        //table manually so comments can be attached to their parent post
        Dictionary<string, BlogMLPost> table = new Dictionary<string, BlogMLPost>();
        using (SqlDataReader sdr = ExecuteDatareader(ConnectionString,
            @"select * from blog_Content c left outer join blog_EntryViewCount evc on
            evc.EntryID = c.ID
            where c.BlogID = @blogid",
            new SqlParameter[] { new SqlParameter("@blogid", blogid) }))
        {
            while (sdr.Read())
            {
                //post (1) or article (2)
                if ((int)sdr["posttype"] == 1 || (int)sdr["posttype"] == 2)
                {
                    //quick init what we can
                    //note: ado.net returns DBNull (never null) for database nulls, so
                    //nullable columns are checked with "is DBNull"
                    BlogMLPost post = new BlogMLPost()
                    {
                        Title = (string)sdr["title"],
                        ID = sdr["ID"].ToString(),
                        DateCreated = sdr.GetDateTime(sdr.GetOrdinal("DateAdded")),
                        DateModified = sdr.GetDateTime(sdr.GetOrdinal("Dateupdated")),
                        Views = 0,
                        HasExcerpt = !(sdr["description"] is DBNull),
                        Content = new BlogMLContent() { Text = sdr["text"].ToString() }
                    };
                    //.text is single author so just add it back in
                    post.Authors.Add(blog.Authors[0].ID);
                    //post or article?
                    post.PostType = (int)sdr["posttype"] == 1 ? BlogPostTypes.Normal : BlogPostTypes.Article;
                    //the entry name may end up being the final part of the url on import;
                    //fall back to the numeric id when there's no friendly name
                    if (!(sdr["entryname"] is DBNull) && sdr["entryname"].ToString() != "")
                    {
                        post.PostUrl = sdr["entryname"].ToString();
                    }
                    else
                    {
                        post.PostUrl = sdr["id"].ToString();
                    }
                    //any excerpt?
                    if (post.HasExcerpt)
                    {
                        post.Excerpt = new BlogMLContent() { Text = sdr["description"].ToString() };
                    }
                    //add the view count
                    if (!(sdr["WebCount"] is DBNull))
                    {
                        post.Views = Convert.ToUInt32(sdr["WebCount"]);
                    }
                    //find the post categories if any - could have more than one
                    int postId = Convert.ToInt32(post.ID);
                    var query = catRefDS.Tables[0].AsEnumerable()
                        .Where(pid => pid.Field<int>("postid") == postId);
                    //add the categories to the post/article
                    foreach (DataRow cat in query)
                    {
                        post.Categories.Add(cat["categoryid"].ToString());
                    }
                    //add post to the blog and to the lookup table for comment attachment later on
                    blog.Posts.Add(post);
                    table[post.ID] = post;
                    Console.WriteLine("...... exported " + post.Title);
                }
                //comments (posttype 3)
                if ((int)sdr["posttype"] == 3)
                {
                    BlogMLComment cmnt = new BlogMLComment()
                    {
                        DateCreated = sdr.GetDateTime(sdr.GetOrdinal("dateadded")),
                        DateModified = sdr.GetDateTime(sdr.GetOrdinal("dateupdated")),
                        ID = sdr["id"].ToString(),
                        Title = sdr["title"].ToString(),
                        UserName = sdr["author"].ToString(),
                        UserEMail = sdr["email"].ToString(),
                        UserUrl = sdr["titleurl"].ToString(),
                        Content = new BlogMLContent() { Text = sdr["text"].ToString() }
                    };
                    //attach to the parent post; skip orphans instead of crashing on a bad parentid
                    //NOTE(review): the query has no "order by", so this assumes the reader yields a
                    //comment's parent post before the comment itself - confirm against the data
                    BlogMLPost parent;
                    if (table.TryGetValue(sdr["parentid"].ToString(), out parent))
                    {
                        parent.Comments.Add(cmnt);
                    }
                    else
                    {
                        Console.WriteLine("...... skipped comment " + cmnt.ID + " - parent post not found");
                    }
                }
            }
        }
    }

    #region Util
    /// <summary>
    /// Runs a parameterless query and returns the result as a DataSet.
    /// </summary>
    public static DataSet ExecuteDataset(string connectionString, string queryString)
    {
        return ExecuteDataset(connectionString, queryString, new SqlParameter[] { });
    }

    /// <summary>
    /// Runs a parameterized query and returns the result as a DataSet.
    /// The connection is opened and closed per call.
    /// </summary>
    public static DataSet ExecuteDataset(string connectionString, string queryString, SqlParameter[] parameters)
    {
        DataSet dataset = new DataSet();
        using (SqlConnection connection = new SqlConnection(connectionString))
        using (SqlDataAdapter adapter = new SqlDataAdapter())
        {
            adapter.SelectCommand = new SqlCommand(queryString, connection);
            foreach (SqlParameter parameter in parameters)
            {
                adapter.SelectCommand.Parameters.Add(parameter);
            }
            adapter.Fill(dataset);
            return dataset;
        }
    }

    /// <summary>
    /// Runs a parameterless query and returns an open data reader.
    /// </summary>
    public static SqlDataReader ExecuteDatareader(string connectionString, string queryString)
    {
        return ExecuteDatareader(connectionString, queryString, new SqlParameter[] { });
    }

    /// <summary>
    /// Runs a parameterized query and returns an open data reader.
    /// The caller must dispose the reader; CommandBehavior.CloseConnection ties the
    /// connection's lifetime to the reader's.
    /// </summary>
    public static SqlDataReader ExecuteDatareader(string connectionString, string queryString, SqlParameter[] parameters)
    {
        SqlConnection connection = new SqlConnection(connectionString);
        try
        {
            connection.Open();
            SqlCommand cmd = connection.CreateCommand();
            cmd.CommandText = queryString;
            foreach (SqlParameter parameter in parameters)
            {
                cmd.Parameters.Add(parameter);
            }
            return cmd.ExecuteReader(CommandBehavior.CloseConnection);
        }
        catch
        {
            //don't leak the connection if Open or ExecuteReader throws
            connection.Dispose();
            throw;
        }
    }
    #endregion
}
}