Fluent CSV
/// <summary>
/// Fluent-style CSV generation, my own (SS) devising, intended to avoid the usual column-mapping problems. The concept is simple:
/// each column is defined by a single pairing of its header name and a function (of the source row object type) that produces its contents,
/// so the header and its data cannot drift apart and the output is always a valid CSV.
/// </summary>
public class CSV : IDisposable
{
public CSV()
{
}
//these are constants purely because the escaped literals are repeated so often; the named versions are easier to read than the raw text.
public const string Quote = "\"";
public const string CommaQuote = ",\"";
public const string EscapedQuote = "\"\"";
/// <summary>
/// Generate a CSV file using a collection of header-function pairs
/// </summary>
/// <typeparam name="T">The object type that this CSV will be built around</typeparam>
/// <param name="rows">The collection of rows that contains the data</param>
/// <param name="columns">The definition of headers/functions</param>
/// <returns>The CSV file in string format (ready for saving)</returns>
public static string FluentCSV<T>(T[] rows, params CSVColumnPairing<T>[] columns)
{
StringBuilder sb = new StringBuilder();
sb.AppendLine(CSVRow(columns.Select(c=>c.Header).ToArray()));
foreach (var result in rows)
{
var funcResults = (from f in columns
select f.Field(result)).ToArray();
sb.AppendLine(CSVRow(funcResults));
}
return sb.ToString();
}
/// <summary>
/// Define a header, field function pair
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="header"></param>
/// <param name="field"></param>
/// <returns></returns>
public static CSVColumnPairing<T> Col<T>(string header, Func<T, string> field)
{
CSVColumnPairing<T> pair = new CSVColumnPairing<T>();
pair.Field = field;
pair.Header = header;
return pair;
}
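/* Example usage - a minimal sketch; the Person type and its properties are assumptions for illustration only:
*
*   var people = new[]
*   {
*       new Person { Name = "Smith, John", Notes = "said \"hello\"" },
*       new Person { Name = "Jones, Ann",  Notes = "" }
*   };
*   string csv = CSV.FluentCSV(people,
*       CSV.Col<Person>("Name",  p => p.Name),
*       CSV.Col<Person>("Notes", p => p.Notes));
*
* The first line of the result is the header row ("Name,Notes"); each array element then becomes one
* data row, with commas, line breaks and quotes escaped by CSVRow further down.
*/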
/// <summary>
/// A class that wraps the CSV column pair
/// </summary>
/// <typeparam name="T"></typeparam>
public class CSVColumnPairing<T>
{
/// <summary>
/// Create a CSV column pair
/// </summary>
public CSVColumnPairing()
{
}
/// <summary>
/// Create a CSV Column pair and supply the values
/// </summary>
/// <param name="Header"></param>
/// <param name="Field"></param>
public CSVColumnPairing(string Header, Func<T, string> Field)
{
this.Header = Header;
this.Field = Field;
}
/// <summary>
/// The header for the column
/// </summary>
public string Header { get; set; }
/// <summary>
/// The function that produces the column's value from a row object
/// </summary>
public Func<T, string> Field { get; set; }
}
/// <summary>
/// Define a CSV row, which consists of multiple items of data in an array
/// </summary>
/// <param name="items">The items</param>
/// <returns>The row as a string</returns>
private static string CSVRow(params string[] items)
{
StringBuilder sb = new StringBuilder();
bool prefix = false;
foreach (var i in items)
{
var item = i ?? "";
if (item.Contains(",") || item.Contains("\n") || item.Contains("\r") || item.Contains("\""))
{
item = "\"" + item.Replace("\"", "\"\"") + "\""; //quote anything containing a comma, line break or quote, and escape any quotes present in it at the same time.
}
if (prefix)
{
sb.Append(",");
}
sb.Append(item);
prefix = true;
}
return sb.ToString();
}
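//A quick worked example of the escaping rules above (values with no special characters pass through untouched):
//  CSVRow("a", "b,c", "say \"hi\"")  =>  a,"b,c","say ""hi"""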
private object fieldStoreLockObject = new object();
private Dictionary<string, DynamicFieldStore> fieldStoreCache = new Dictionary<string, DynamicFieldStore>();
/// <summary>
/// Process header aliases
/// </summary>
/// <param name="propertyInfo"></param>
/// <returns></returns>
private string processHeaderAlias(PropertyInfo propertyInfo)
{
//alias lookups are cached per declaring type, keyed on propertyInfo.DeclaringType.Name
if (this.state != null)
{
string declaringType = propertyInfo.DeclaringType.Name;
DynamicFieldStore fieldStore = null;
lock (fieldStoreLockObject)
{
if (!fieldStoreCache.ContainsKey(declaringType))
{
var customFields = DynamicFieldHelper.GetFieldsForEntity(this.state, propertyInfo.DeclaringType.Name);
fieldStore = customFields;
fieldStoreCache.Add(declaringType, customFields);
}
else
{
fieldStore = fieldStoreCache[declaringType];
}
}
if (fieldStore != null)
{
if (fieldStore.ColumnConfiguration.ContainsKey(propertyInfo.Name))
{
return fieldStore.ColumnConfiguration[propertyInfo.Name].ColumnAlias;
}
}
}
return propertyInfo.Name;
}
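//For example (names here are assumptions, purely illustrative): if the field store for "Invoice" maps the
//property "CustRef" to the alias "Customer Reference", the header row shows "Customer Reference" instead;
//properties with no configured alias, or no state store at all, fall back to the property name as above.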
public string RenderObjectCollectionAsCSV(object modelEntity)
{
return RenderObjectCollectionAsCSV(modelEntity, new string[0], new IColumnForEntity[0]);
}
public string RenderObjectCollectionAsCSV(object modelEntity, string[] skipProperties, IColumnForEntity[] extraColumns)
{
StringBuilder sb = new StringBuilder();
if (modelEntity is IEnumerable)
{
IEnumerable array = modelEntity as IEnumerable;
//add the titles
var enumerator = array.GetEnumerator();
if (enumerator.MoveNext()) //guard against an empty collection, where Current would be undefined
{
var headerLine = renderObject(enumerator.Current, "", false, true, skipProperties, extraColumns);
sb.AppendLine(headerLine);
}
//no Reset() needed: the foreach below obtains a fresh enumerator, and Reset throws for many IEnumerable implementations
foreach (var line in array)
{
string textLine = "";
textLine = renderObject(line, textLine, false, false, skipProperties, extraColumns);
sb.AppendLine(textLine);
}
}
else
{
sb.AppendLine(renderObject(modelEntity, "", false, true, skipProperties, extraColumns)); //title
sb.AppendLine(renderObject(modelEntity, "", false, false, skipProperties, extraColumns)); //content
}
return sb.ToString();
}
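/* Example usage of the reflection-based export - a sketch; the orders collection and the "InternalId" property are assumptions:
*
*   using (var csv = new CSV()) //using, so the field store cache is released promptly (see Dispose below)
*   {
*       string text = csv.RenderObjectCollectionAsCSV(
*           orders,                   //any IEnumerable of a single entity type
*           new[] { "InternalId" },   //property names to leave out of the export
*           new IColumnForEntity[0]); //no extra computed columns in this case
*   }
*/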
private string renderObject(object line, string existingline, bool doNotRecurse, bool titlesOnly, string[] skipProperties, IColumnForEntity[] extraColumns)
{
//so that the title pass and the data pass error and skip elements identically, both lines are generated by the same code path.
//this felt odd while writing it, but it was the easiest way to guarantee the two stay in step.
bool first = true;
var unknownObject = line.GetType();
string headerLine = existingline;
if (existingline.Length > 0)
{
first = false;
}
foreach (var propertyInfo in unknownObject.GetProperties())
{
if (IsMethodSuitableForExport(propertyInfo) && !skipProperties.Contains(propertyInfo.Name))
{
object item = null;
try
{
item = propertyInfo.GetValue(line, null);
if (!propertyInfo.PropertyType.FullName.Contains("EntitySet"))
{
if (propertyInfo.PropertyType != typeof(String)
&& propertyInfo.PropertyType != typeof(DateTime)
&& propertyInfo.PropertyType.IsClass
&& !propertyInfo.PropertyType.IsArray
&& !doNotRecurse) //expand single objects, we don't support arrays of them (yet)
{
if (item != null)
{
if (titlesOnly)
{
headerLine = renderObject(item, headerLine, true, titlesOnly, new string[0], extraColumns);
}
else
{
existingline = renderObject(item, existingline, true, titlesOnly, new string[0], extraColumns);
}
}
else
{
existingline += first ? ""
: ","; //otherwise one empty entry
if (titlesOnly)
{
headerLine += first ? ""
: ",";
}
}
}
else
{
if (item != null)
{
if (item is DateTime)
{
var i = (DateTime)item;
existingline += first ? Quote + i.ToString() + Quote
: CommaQuote + i.ToString() + Quote;
if (titlesOnly)
{
headerLine += first ? Quote + processHeaderAlias(propertyInfo) + Quote
: CommaQuote + processHeaderAlias(propertyInfo) + Quote;
}
}
else
{
existingline += first ? Quote + item.ToString().Replace(Quote, EscapedQuote) + Quote
: CommaQuote + item.ToString().Replace(Quote, EscapedQuote) + Quote;
if (titlesOnly)
{
headerLine += first ? Quote + processHeaderAlias(propertyInfo) + Quote
: CommaQuote + processHeaderAlias(propertyInfo) + Quote;
}
}
}
else
{
existingline += first ? ""
: ",";
if (titlesOnly)
{
headerLine += first ? Quote + processHeaderAlias(propertyInfo) + Quote
: CommaQuote + processHeaderAlias(propertyInfo) + Quote;
}
}
}
}
}
catch (Exception)
{
existingline += first ? ""
: ",";
if (titlesOnly)
{
headerLine += first ? Quote + processHeaderAlias(propertyInfo) + Quote
: CommaQuote + processHeaderAlias(propertyInfo) + Quote;
}
}
first = false;
}
}
if (titlesOnly)
{
if (extraColumns != null)
{
foreach (var extraColumn in extraColumns)
{
if (extraColumn.ColumnAppliesToEntity(line))
{
headerLine += "," + extraColumn.ColumnName;
}
}
}
return headerLine;
}
else
{
if (extraColumns != null)
{
foreach (var extraColumn in extraColumns)
{
if (extraColumn.ColumnAppliesToEntity(line))
{
existingline += "," + extraColumn.Execute(line);
}
}
}
return existingline;
}
}
public static bool IsMethodSuitableForExport(System.Reflection.PropertyInfo propertyInfo)
{
if (Attribute.IsDefined(propertyInfo, typeof (DoNotExportAttribute))) return false;
return propertyInfo.CanRead;
}
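//Example (illustrative only): decorate a property with [DoNotExport] to keep it out of the generated CSV, e.g.
//  public class Customer
//  {
//      public string Name { get; set; }
//      [DoNotExport]
//      public string PasswordHash { get; set; } //readable, but excluded by IsMethodSuitableForExport
//  }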
/// <summary>
/// Clear up resources a bit earlier explicitly
/// </summary>
/// <remarks>While we hold no unmanaged resources, we don't want to keep the field store cache
/// dictionaries around longer than needed, as they could be largish depending on how many objects we walked;
/// this way callers can use a using statement instead of waiting for the GC to get round to them.</remarks>
public void Dispose()
{
Dispose(true);
GC.SuppressFinalize(this);
}
private bool disposed = false;
protected void Dispose(bool disposing)
{
if (!disposed)
{
fieldStoreCache = null;
state = null; //we also detach from the state store.
disposed = true;
}
}
//no unmanaged so no finaliser
}